Files
iot-device-management-service/app/services/agent/graph.py

72 lines
2.1 KiB
Python
Raw Normal View History

"""
LangGraph StateGraph 构建
构建 assistant + ToolNode ReAct 接入 Dashscope Qwen
"""
from typing import Optional
from langgraph.graph import StateGraph, START, END
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.checkpoint.memory import MemorySaver
from langchain_openai import ChatOpenAI
from app.config import settings
from app.utils.logger import logger
from .state import AgentState
from .tools import all_tools
from .prompts import SYSTEM_PROMPT
def _create_llm(temperature: float = 0.1):
    """Create the LLM client (talks to Qwen via Dashscope's OpenAI-compatible API).

    Args:
        temperature: Sampling temperature. Defaults to 0.1 to keep
            tool-calling behavior mostly deterministic.

    Returns:
        A configured ChatOpenAI instance bound to the settings in
        ``settings.agent`` (model, base URL, API key, timeout).
    """
    return ChatOpenAI(
        model=settings.agent.model,
        base_url=settings.agent.vlm_base_url,
        api_key=settings.agent.vlm_api_key,
        timeout=settings.agent.timeout,
        temperature=temperature,
    )
def build_agent_graph(checkpointer=None):
    """Build and compile the agent graph.

    Args:
        checkpointer: LangGraph checkpointer instance; ``None`` disables
            persistence.

    Returns:
        The compiled graph (CompiledGraph).
    """
    # LLM with the full tool set bound, so it can emit tool calls.
    model = _create_llm().bind_tools(all_tools)

    def assistant(state: AgentState):
        """LLM reasoning node: prepend the system prompt to the running
        conversation and return either a reply or tool-call requests."""
        conversation = [{"role": "system", "content": SYSTEM_PROMPT}, *state["messages"]]
        return {"messages": [model.invoke(conversation)]}

    builder = StateGraph(AgentState)

    # Two core nodes: the LLM and the tool executor.
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(all_tools))

    # Edges: START -> assistant -> (conditional) -> tools or END,
    # with tool results looping back into the assistant.
    builder.add_edge(START, "assistant")
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    compiled = builder.compile(checkpointer=checkpointer)
    logger.info("LangGraph Agent 图已编译")
    return compiled
def create_default_graph():
    """Create the default graph backed by an in-memory checkpointer (development use)."""
    return build_agent_graph(checkpointer=MemorySaver())