功能:AgentConfig 统一为 VLM 多模态模型

去掉 llm_model/llm_base_url/llm_api_key,统一使用 vlm 前缀。
默认模型 qwen3-vl-flash-2026-01-22,通过 AGENT_VLM_MODEL 环境变量配置。
This commit is contained in:
2026-03-20 11:04:08 +08:00
parent 0d790d9992
commit d44fc410bb
2 changed files with 12 additions and 12 deletions

View File

@@ -61,7 +61,7 @@ WORK_ORDER_APP_ID=alarm-system
WORK_ORDER_APP_SECRET=your-app-secret
WORK_ORDER_TENANT_ID=1
# ===== 交互Agent配置 =====
# ===== 交互Agent配置(统一使用 VLM 多模态模型) =====
AGENT_ENABLED=false
AGENT_LLM_MODEL=qwen-plus # 文本对话模型(复用 DASHSCOPE_API_KEY)
AGENT_LLM_TIMEOUT=15
AGENT_VLM_MODEL=qwen3-vl-flash-2026-01-22 # 多模态模型(文字+图片)
AGENT_VLM_TIMEOUT=15

View File

@@ -67,11 +67,11 @@ class WeChatConfig:
@dataclass
class AgentConfig:
"""交互Agent配置"""
llm_api_key: str = ""
llm_base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
llm_model: str = "qwen-plus"
llm_timeout: int = 15
"""交互Agent配置(统一使用 VLM 多模态模型)"""
vlm_api_key: str = ""
vlm_base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
vlm_model: str = "qwen3-vl-flash-2026-01-22"
vlm_timeout: int = 15
enabled: bool = False
@@ -186,10 +186,10 @@ def load_settings() -> Settings:
group_chat_id=os.getenv("WECHAT_GROUP_CHAT_ID", ""),
),
agent=AgentConfig(
llm_api_key=os.getenv("DASHSCOPE_API_KEY", ""),
llm_base_url=os.getenv("AGENT_LLM_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1"),
llm_model=os.getenv("AGENT_LLM_MODEL", "qwen-plus"),
llm_timeout=int(os.getenv("AGENT_LLM_TIMEOUT", "15")),
vlm_api_key=os.getenv("DASHSCOPE_API_KEY", ""),
vlm_base_url=os.getenv("AGENT_VLM_BASE_URL", "https://dashscope.aliyuncs.com/compatible-mode/v1"),
vlm_model=os.getenv("AGENT_VLM_MODEL", os.getenv("VLM_MODEL", "qwen3-vl-flash-2026-01-22")),
vlm_timeout=int(os.getenv("AGENT_VLM_TIMEOUT", "15")),
enabled=os.getenv("AGENT_ENABLED", "false").lower() == "true",
),
work_order=WorkOrderConfig(