From 4fd2763be60f8b742810b31ddc16ae349e9facc1 Mon Sep 17 00:00:00 2001
From: root <953994191@qq.com>
Date: Fri, 1 May 2026 11:06:57 +0800
Subject: [PATCH] fix: frontend supports the fast_path node for fast-path responses
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 frontend/src/components/chat_area.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/src/components/chat_area.py b/frontend/src/components/chat_area.py
index 4711773..207ad9a 100644
--- a/frontend/src/components/chat_area.py
+++ b/frontend/src/components/chat_area.py
@@ -143,7 +143,7 @@ def _handle_ai_response():
         # 1. Handle the LLM token stream (typewriter effect)
         if event_type == "llm_token":
             # Only process tokens that come from the LLM, so tool output is never rendered as tokens
-            if event.get("node") in ("llm_call", "fallback"):
+            if event.get("node") in ("llm_call", "fallback", "fast_path"):
                 token = str(event.get("token", ""))
                 reasoning_token = str(event.get("reasoning_token", ""))
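
Note: for context, below is a minimal sketch, not the project's actual code, of the node-whitelist filter this patch widens. The `handle_event` helper, the "type" key, and the render callback are assumptions made for illustration; only the membership check on `event.get("node")` mirrors the diff.

    # Minimal sketch (assumed event shape) of the llm_token node filter.

    LLM_NODES = ("llm_call", "fallback", "fast_path")

    def handle_event(event: dict, render) -> None:
        """Forward typewriter tokens, but only from LLM-producing nodes."""
        if event.get("type") != "llm_token":  # "type" key is an assumption
            return
        # Tool nodes can emit llm_token-shaped events too; dropping them
        # here keeps tool output from being rendered as streamed text.
        if event.get("node") not in LLM_NODES:
            return
        render(str(event.get("token", "")), str(event.get("reasoning_token", "")))

    # Before this patch a fast-path token was silently dropped; now it renders:
    handle_event(
        {"type": "llm_token", "node": "fast_path", "token": "Hello"},
        lambda token, reasoning: print(token, end="", flush=True),
    )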