修复: 前端支持final_response节点的token
All checks were successful
构建并部署 AI Agent 服务 / deploy (push) Successful in 6m20s

This commit is contained in:
2026-05-01 13:53:25 +08:00
parent 9ed946cbe3
commit 1e15a0e550

View File

@@ -175,7 +175,7 @@ def _handle_ai_response():
elif event_type == "llm_token":
node_name = event.get("node", "unknown")
# 确保只处理来自 LLM 的 token，避免将工具的输出作为 token 显示
if node_name in ("llm_call", "fallback"):
if node_name in ("llm_call", "fallback", "final_response"):
token = str(event.get("token", ""))
reasoning_token = str(event.get("reasoning_token", ""))