fix: frontend supports displaying fallback output (node: fallback)
All checks were successful
Build and deploy AI Agent service / deploy (push) Successful in 6m0s
@@ -143,7 +143,7 @@ def _handle_ai_response():
         # 1. Handle the LLM token stream (typewriter effect)
         if event_type == "llm_token":
             # Only handle tokens coming from the LLM, so tool output is not displayed as tokens
-            if event.get("node") == "llm_call":
+            if event.get("node") in ("llm_call", "fallback"):
                 token = str(event.get("token", ""))
                 reasoning_token = str(event.get("reasoning_token", ""))

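For context, a minimal sketch of the backend side this change caters to: a fallback node that streams its reply as llm_token events tagged node="fallback", which the widened check above now renders with the same typewriter effect as regular LLM output. The function name fallback_node, the event shape, and the sample message are assumptions for illustration, not part of this commit.

    # Hypothetical sketch; fallback_node and the event fields shown here are assumed, not taken from this repo.
    from typing import Iterator

    def fallback_node(message: str) -> Iterator[dict]:
        """Stream a canned fallback reply as llm_token events, one character at a time."""
        for ch in message:
            # Tagging the event with node="fallback" lets the updated frontend check
            # (node in ("llm_call", "fallback")) pick it up for the typewriter display.
            yield {"event": "llm_token", "node": "fallback", "token": ch, "reasoning_token": ""}

    # Usage: apply the frontend-side condition from the diff to the streamed events.
    for event in fallback_node("Sorry, the service is temporarily unavailable. Please try again later."):
        if event.get("node") in ("llm_call", "fallback"):
            print(str(event.get("token", "")), end="", flush=True)
    print()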