fix: frontend supports displaying fallback output (node: fallback)
2026-05-01 10:22:48 +08:00
parent c90471d67d
commit f44050d777


@@ -143,7 +143,7 @@ def _handle_ai_response():
 # 1. Handle the LLM token stream (typewriter effect)
 if event_type == "llm_token":
     # Make sure we only handle tokens coming from the LLM, so tool output is not rendered as tokens
-    if event.get("node") == "llm_call":
+    if event.get("node") in ("llm_call", "fallback"):
         token = str(event.get("token", ""))
         reasoning_token = str(event.get("reasoning_token", ""))
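
For context, a minimal sketch of the producer/consumer shape this patch assumes: both the primary LLM node and a fallback node stream events of type "llm_token", differing only in their "node" field, so the frontend condition must accept both node names. This is not the project's actual code; the names stream_events, render, and llm_ok are illustrative assumptions.

from typing import Iterator


def stream_events(llm_ok: bool) -> Iterator[dict]:
    """Yield llm_token events from the primary node, or from a fallback node on failure (hypothetical)."""
    if llm_ok:
        for tok in ["Hello", ", ", "world"]:
            yield {"event": "llm_token", "node": "llm_call", "token": tok}
    else:
        # Fallback path: a canned reply streamed token by token under node="fallback".
        for tok in ["Service", " busy", ", please", " retry."]:
            yield {"event": "llm_token", "node": "fallback", "token": tok}


def render(events: Iterator[dict]) -> str:
    """Frontend-side handling, mirroring the patched condition in the diff above."""
    out = []
    for event in events:
        if event.get("event") == "llm_token":
            # Accept tokens from both the primary LLM node and the fallback node.
            if event.get("node") in ("llm_call", "fallback"):
                out.append(str(event.get("token", "")))
    return "".join(out)


if __name__ == "__main__":
    print(render(stream_events(llm_ok=True)))   # -> Hello, world
    print(render(stream_events(llm_ok=False)))  # -> Service busy, please retry.

With the old check, the second call would have rendered nothing, since every fallback token was filtered out by the node == "llm_call" comparison.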