From 1e15a0e550ab1aa184778708ccb073a00691211f Mon Sep 17 00:00:00 2001
From: root <953994191@qq.com>
Date: Fri, 1 May 2026 13:53:25 +0800
Subject: [PATCH] Fix: frontend supports tokens from the final_response node
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 frontend/src/components/chat_area.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/src/components/chat_area.py b/frontend/src/components/chat_area.py
index 32323ea..6755228 100644
--- a/frontend/src/components/chat_area.py
+++ b/frontend/src/components/chat_area.py
@@ -175,7 +175,7 @@ def _handle_ai_response():
         elif event_type == "llm_token":
             node_name = event.get("node", "unknown")
             # Ensure only tokens coming from the LLM are handled, so tool output is never displayed as a token
-            if node_name in ("llm_call", "fallback"):
+            if node_name in ("llm_call", "fallback", "final_response"):
                 token = str(event.get("token", ""))
                 reasoning_token = str(event.get("reasoning_token", ""))
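
For reviewers, a minimal sketch of the filtering logic this hunk touches, assuming
events are dicts carrying the "type"/"node"/"token"/"reasoning_token" keys visible
in the diff. handle_ai_response and the events list below are hypothetical
stand-ins for the real chat_area plumbing, not the actual frontend code; only the
node names and the membership guard come from the patch itself.

    # Nodes whose streamed tokens should be rendered as assistant output.
    # Before this patch "final_response" was missing from the tuple, so its
    # tokens failed the membership guard and were silently dropped.
    LLM_NODES = ("llm_call", "fallback", "final_response")

    def handle_ai_response(stream_events):
        answer_parts = []
        reasoning_parts = []
        for event in stream_events:
            if event.get("type") != "llm_token":
                continue  # non-token events (tool calls etc.) are handled elsewhere
            node_name = event.get("node", "unknown")
            # Only render tokens emitted by LLM nodes, so raw tool output
            # never leaks into the chat transcript.
            if node_name in LLM_NODES:
                answer_parts.append(str(event.get("token", "")))
                reasoning_parts.append(str(event.get("reasoning_token", "")))
        return "".join(answer_parts), "".join(reasoning_parts)

    # Illustrative check: a final_response token is now kept, tool output is not.
    events = [
        {"type": "llm_token", "node": "final_response", "token": "Hello"},
        {"type": "llm_token", "node": "tool_call", "token": "<raw tool output>"},
    ]
    answer, _ = handle_ai_response(events)
    assert answer == "Hello"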