diff --git a/README.md b/README.md
index 18f3843..1588ae3 100644
--- a/README.md
+++ b/README.md
@@ -85,8 +85,8 @@ graph TB
 
     AgentService -->|模型路由| ChatServices[模型服务层 chat_services]
     ChatServices -->|自动降级| FallbackChain[FallbackServiceChain]
-    FallbackChain -->|创建| Zhipu[智谱 GLM-4.7]
-    FallbackChain -->|创建| DeepSeek[DeepSeek Reasoner]
+    FallbackChain -->|创建| Zhipu[智谱 GLM-5.1]
+    FallbackChain -->|创建| DeepSeek[DeepSeek V4-Pro]
    FallbackChain -->|创建| LocalGemma[本地 Gemma-4]
 
    AgentService -->|初始化| LangGraph[LangGraph 工作流引擎]
@@ -1560,8 +1560,8 @@ streamlit run frontend/src/frontend_main.py
 ### 多模型切换
 
 1. 在左侧边栏选择模型:
-   - **智谱 GLM-4.7**:在线服务,速度快
-   - **DeepSeek Reasoner**:深度推理模型
+   - **智谱 GLM-5.1**:在线服务,速度快
+   - **DeepSeek V4-Pro**:深度推理模型
    - **本地 Gemma-4**:本地部署,隐私性好
 2. 可随时切换,甚至在同一会话中
diff --git a/frontend/src/components/chat_area.py b/frontend/src/components/chat_area.py
index 620311f..411e1cb 100644
--- a/frontend/src/components/chat_area.py
+++ b/frontend/src/components/chat_area.py
@@ -156,7 +156,7 @@ def _handle_ai_response():
         display_text = raw_text
         is_thinking = False
 
-        # 1. 原生 API 推理模式 (如 DeepSeek-Reasoner)
+        # 1. 原生 API 推理模式 (如 DeepSeek-V4-Pro)
         if api_thought:
             is_thinking = not bool(raw_text.strip())
 
diff --git a/frontend/src/config.py b/frontend/src/config.py
index 509855c..2ab205a 100644
--- a/frontend/src/config.py
+++ b/frontend/src/config.py
@@ -72,9 +72,9 @@ class FrontendConfig:
         """初始化后处理 - 设置默认值和加载环境变量"""
         if self.model_options is None:
             self.model_options = {
-                "zhipu": "智谱 GLM-4.7-Flash(在线)",
+                "zhipu": "智谱 GLM-5.1(在线)",
                 "local": "本地 llama.cpp(Gemma-4)",
-                "deepseek": "DeepSeek V3.2(在线)"
+                "deepseek": "DeepSeek V4-Pro(在线)"
             }
 
         # 从环境变量加载配置(优先级最高)