"""
LangGraph 节点日志工具模块

提供状态流转追踪和 LLM 输入输出打印功能
"""

from app.config import ENABLE_GRAPH_TRACE
from app.logger import debug, info

def log_state_change(node_name: str, state: dict, prefix: str = "进入") -> None:
    """Log a state transition for a LangGraph node.

    Emits one info-level line summarizing the node, the transition
    direction, the current message count, and a short preview of the
    most recent message.

    Args:
        node_name: Name of the graph node being traced.
        state: Current graph state; messages are read from state["messages"].
        prefix: Log prefix marking the direction ("进入" = entering,
            "离开" = leaving).
    """
    # NOTE: the original re-imported `info` locally here, shadowing the
    # module-level import from app.logger — removed as dead code.
    messages = state.get("messages", [])
    msg_count = len(messages)
    last_msg = messages[-1] if messages else None
    last_info = ""
    if last_msg:
        # Messages may arrive either as plain dicts or as message objects;
        # handle both shapes. Preview is capped at 10 chars, newlines flattened.
        if isinstance(last_msg, dict):
            content_preview = str(last_msg.get("content", ""))[:10].replace("\n", " ")
            msg_type = last_msg.get("type", "unknown")
        else:
            content_preview = str(last_msg.content)[:10].replace("\n", " ")
            msg_type = getattr(last_msg, 'type', 'unknown')
        last_info = f"{msg_type.upper()}: {content_preview}"
    info(f"🔄 [{node_name}] {prefix} | 消息数:{msg_count} | 最后一条:{last_info}")


def print_llm_input(prompt_value):
    """RunnableLambda callback: dump the fully formatted messages bound for the LLM.

    Args:
        prompt_value: ChatPromptValue carrying the formatted message list.

    Returns:
        The very same prompt_value, untouched, so chaining is unaffected.
    """
    # Tracing disabled — act as a transparent pass-through.
    if not ENABLE_GRAPH_TRACE:
        return prompt_value

    formatted_messages = prompt_value.messages  # ChatPromptValue exposes .messages

    banner = "=" * 80
    debug("\n" + banner)
    debug("📤 [LLM输入] 格式化后发送给大模型的完整消息:")
    debug(f"  总消息数: {len(formatted_messages)}")
    debug("-" * 80)
    for idx, message in enumerate(formatted_messages):
        # Full content on purpose — no truncation for trace output.
        body = str(message.content)
        debug(f"  [{idx}] {message.type.upper():10s}: {body}")
    debug("\n" + banner + "\n")

    return prompt_value