From 4722e2646a8df7b6345b22f0b55f0be17b264560 Mon Sep 17 00:00:00 2001
From: root <953994191@qq.com>
Date: Fri, 24 Apr 2026 21:57:15 +0800
Subject: [PATCH] =?UTF-8?q?feat:=20=E5=B0=86=E6=99=BA=E8=B0=B1=E6=A8=A1?=
 =?UTF-8?q?=E5=9E=8B=E8=AE=BE=E4=B8=BA=E9=BB=98=E8=AE=A4=E9=A6=96=E8=A6=81?=
 =?UTF-8?q?=E9=80=89=E6=8B=A9?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 backend/app/agent/llm_factory.py | 2 +-
 backend/app/agent/service.py     | 2 +-
 frontend/src/config.py           | 6 +++---
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/backend/app/agent/llm_factory.py b/backend/app/agent/llm_factory.py
index e0ca134..0dbe8ac 100644
--- a/backend/app/agent/llm_factory.py
+++ b/backend/app/agent/llm_factory.py
@@ -51,7 +51,7 @@ class LLMFactory:
 
     # 模型创建器映射
     CREATORS = {
+        "zhipu": create_zhipu,
         "local": create_local,
         "deepseek": create_deepseek,
-        "zhipu": create_zhipu,
     }
\ No newline at end of file
diff --git a/backend/app/agent/service.py b/backend/app/agent/service.py
index 2146daf..6af4c5f 100644
--- a/backend/app/agent/service.py
+++ b/backend/app/agent/service.py
@@ -42,7 +42,7 @@ class AIAgentService:
             raise RuntimeError("没有可用的模型")
         return self
 
-    async def process_message(self, message: str, thread_id: str, model: str = "local", user_id: str = "default_user") -> dict:
+    async def process_message(self, message: str, thread_id: str, model: str = "zhipu", user_id: str = "default_user") -> dict:
         """处理用户消息,返回包含回复、token统计和耗时的字典"""
         if model not in self.graphs:
             # 回退到第一个可用模型
diff --git a/frontend/src/config.py b/frontend/src/config.py
index f9631d5..509855c 100644
--- a/frontend/src/config.py
+++ b/frontend/src/config.py
@@ -51,7 +51,7 @@ class FrontendConfig:
     layout: str = "wide"
 
     # ==================== 模型配置(固定值,无需环境变量) ====================
-    default_model: str = "local"
+    default_model: str = "zhipu"
     model_options: Optional[dict] = None
 
     # ==================== 用户配置(固定值,无需环境变量) ====================
@@ -72,9 +72,9 @@ class FrontendConfig:
         """初始化后处理 - 设置默认值和加载环境变量"""
         if self.model_options is None:
            self.model_options = {
+                "zhipu": "智谱 GLM-4.7-Flash(在线)",
                 "local": "本地 llama.cpp(Gemma-4)",
-                "deepseek": "DeepSeek V3.2(在线)",
-                "zhipu": "智谱 GLM-4.7-Flash(在线)"
+                "deepseek": "DeepSeek V3.2(在线)"
             }
 
         # 从环境变量加载配置(优先级最高)