Files
iot-device-management-service/.env.example
16337 d44fc410bb 功能:AgentConfig 统一为 VLM 多模态模型
去掉 llm_model/llm_base_url/llm_api_key,统一使用 vlm 前缀。
默认模型 qwen3-vl-flash-2026-01-22,通过 AGENT_VLM_MODEL 环境变量配置。
2026-03-20 11:04:08 +08:00

68 lines
1.8 KiB
Plaintext
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

# 数据库配置
DATABASE_URL=sqlite:///./data/alert_platform.db
# 腾讯云 COS 存储配置
COS_ENABLED=false
COS_SECRET_ID=your_secret_id
COS_SECRET_KEY=your_secret_key
COS_REGION=ap-beijing
COS_BUCKET=your-bucket-1250000000
COS_UPLOAD_PREFIX=alerts
COS_PRESIGN_EXPIRE=1800
COS_STS_EXPIRE=1800
# 应用配置
APP_HOST=0.0.0.0
APP_PORT=8000
DEBUG=true
DEV_MODE=true
# 大模型配置(可选)
AI_MODEL_ENDPOINT=http://localhost:8001
AI_MODEL_API_KEY=your_api_key
# Redis 配置(配置下发三层权威模型 - 云端层)
REDIS_ENABLED=true
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
REDIS_DB=0
REDIS_MAX_CONNECTIONS=50
# 边缘设备认证 Token(边缘端上报告警时使用)
# EDGE_TOKEN=your_edge_device_token
# ===== VLM 复核配置 =====
VLM_ENABLED=false
DASHSCOPE_API_KEY=your_dashscope_api_key
VLM_MODEL=qwen3-vl-flash-2026-01-22
VLM_TIMEOUT=10
# ===== 企微通知配置 =====
WECHAT_ENABLED=false
WECHAT_CORP_ID=your_corp_id
WECHAT_AGENT_ID=your_agent_id
WECHAT_SECRET=your_secret
WECHAT_TOKEN=your_callback_token
WECHAT_ENCODING_AES_KEY=your_encoding_aes_key
# 企微演示模式
WECHAT_TEST_UIDS= # 测试用企微userid,逗号分隔,如 zhangsan,lisi
SERVICE_BASE_URL= # H5页面公网地址,如 https://vsp.viewshanghai.com
WECHAT_GROUP_CHAT_ID= # 告警群聊ID
# ===== WVP 后端地址 =====
WVP_API_BASE=http://127.0.0.1:18080
# ===== 工单对接 =====
WORK_ORDER_ENABLED=false
WORK_ORDER_BASE_URL=http://your-server:48080
WORK_ORDER_APP_ID=alarm-system
WORK_ORDER_APP_SECRET=your-app-secret
WORK_ORDER_TENANT_ID=1
# ===== 交互Agent配置(统一使用 VLM 多模态模型) =====
AGENT_ENABLED=false
AGENT_VLM_MODEL=qwen3-vl-flash-2026-01-22 # 多模态模型(文字+图片)
AGENT_VLM_TIMEOUT=15