diff --git a/backend/app/agent/agent_service.py b/backend/app/agent/agent_service.py
index 552f711..555c6b2 100644
--- a/backend/app/agent/agent_service.py
+++ b/backend/app/agent/agent_service.py
@@ -14,6 +14,7 @@ from ..model_services.chat_services import get_all_chat_services, LocalVLLMChatP
 from app.main_graph.utils.rag_initializer import init_rag_tool
 from app.core.intent_classifier import get_intent_classifier
 from app.logger import info, warning
+from app.main_graph.state import MainGraphState, CurrentAction
 
 class AIAgentService:
     def __init__(self, checkpointer):
@@ -120,8 +121,12 @@ class AIAgentService:
             "configurable": {"thread_id": thread_id},
             "metadata": {"user_id": user_id}
         }
-        input_state = {"messages": [{"role": "user", "content": message}]}
-        context = GraphContext(user_id=user_id)
+        input_state = {
+            "user_query": message,
+            "messages": [{"role": "user", "content": message}],
+            "user_id": user_id,
+            "current_action": CurrentAction.NONE
+        }
 
         # ========== 新增:混合路由 ==========
         intent_result = await self.intent_classifier.classify(message)
@@ -161,7 +166,6 @@ class AIAgentService:
         async for chunk in graph.astream(
             input_state,
             config=config,
-            context=context,
             stream_mode=["messages", "updates", "custom"],
             version="v2",
             subgraphs=True