diff --git a/backend/app/agent/llm_factory.py b/backend/app/agent/llm_factory.py
index e0ca134..0dbe8ac 100644
--- a/backend/app/agent/llm_factory.py
+++ b/backend/app/agent/llm_factory.py
@@ -51,7 +51,7 @@ class LLMFactory:
 
     # 模型创建器映射
     CREATORS = {
+        "zhipu": create_zhipu,
         "local": create_local,
         "deepseek": create_deepseek,
-        "zhipu": create_zhipu,
     }
\ No newline at end of file
diff --git a/backend/app/agent/service.py b/backend/app/agent/service.py
index 2146daf..6af4c5f 100644
--- a/backend/app/agent/service.py
+++ b/backend/app/agent/service.py
@@ -42,7 +42,7 @@ class AIAgentService:
             raise RuntimeError("没有可用的模型")
         return self
 
-    async def process_message(self, message: str, thread_id: str, model: str = "local", user_id: str = "default_user") -> dict:
+    async def process_message(self, message: str, thread_id: str, model: str = "zhipu", user_id: str = "default_user") -> dict:
         """处理用户消息,返回包含回复、token统计和耗时的字典"""
         if model not in self.graphs:
             # 回退到第一个可用模型
diff --git a/frontend/src/config.py b/frontend/src/config.py
index f9631d5..509855c 100644
--- a/frontend/src/config.py
+++ b/frontend/src/config.py
@@ -51,7 +51,7 @@ class FrontendConfig:
     layout: str = "wide"
 
     # ==================== 模型配置(固定值,无需环境变量) ====================
-    default_model: str = "local"
+    default_model: str = "zhipu"
     model_options: Optional[dict] = None
 
     # ==================== 用户配置(固定值,无需环境变量) ====================
@@ -72,9 +72,9 @@
         """初始化后处理 - 设置默认值和加载环境变量"""
         if self.model_options is None:
             self.model_options = {
+                "zhipu": "智谱 GLM-4.7-Flash(在线)",
                 "local": "本地 llama.cpp(Gemma-4)",
-                "deepseek": "DeepSeek V3.2(在线)",
-                "zhipu": "智谱 GLM-4.7-Flash(在线)"
+                "deepseek": "DeepSeek V3.2(在线)"
             }
 
         # 从环境变量加载配置(优先级最高)