Multi-turn conversation with Qwen in Node/Python

Multi-turn conversation with a large model comes down to three pieces (a minimal sketch of how they fit together follows this list):

  1. llm
  2. prompt
  3. RunnableWithMessageHistory

Note: multi-turn conversation with agents is not covered here.
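A minimal Python sketch of how the three pieces fit together (the in-memory store and the get_history helper are just for illustration; the full Node and Python versions follow below):

python
# 1. llm
from langchain_openai import ChatOpenAI
# 2. prompt with a placeholder where the history messages are injected
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
# 3. wrapper that loads/saves the history around every call
from langchain_core.runnables import RunnableWithMessageHistory
from langchain_core.chat_history import InMemoryChatMessageHistory

llm = ChatOpenAI(model="qwen-plus")

prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant."),
    MessagesPlaceholder("chat_history"),
    ("human", "{input}"),
])

store = {}  # session_id -> history, in-memory for illustration only
def get_history(session_id: str):
    return store.setdefault(session_id, InMemoryChatMessageHistory())

chat = RunnableWithMessageHistory(
    prompt | llm,
    get_history,
    input_messages_key="input",
    history_messages_key="chat_history",
)

# every call with the same session_id sees the earlier turns
chat.invoke({"input": "hi"}, config={"configurable": {"session_id": "demo"}})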

Node version

  • Dependencies
json
"@langchain/anthropic": "^1.0.0",
"@langchain/community": "^1.0.2",
"@langchain/core": "^1.0.3",
"@langchain/langgraph": "^1.0.1",
"@langchain/langgraph-supervisor": "^1.0.0",
"@langchain/openai": "^1.0.0",
"axios": "^1.13.2",
"dotenv": "^17.2.3",
"langchain": "^1.0.2",
"langsmith": "^0.3.78",
"openai": "^6.8.1",
"zod": "^4.1.12"

Implementation

JavaScript
import { ChatOpenAI } from "@langchain/openai";
import "dotenv/config";
import * as readline from 'readline';
import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";

import { FileSystemChatMessageHistory } from "@langchain/community/stores/message/file_system";
import { RunnableWithMessageHistory } from "@langchain/core/runnables";
import { StringOutputParser } from "@langchain/core/output_parsers";


// llm
const model = new ChatOpenAI({
  model: "qwen-plus",
  apiKey: process.env.OPENAI_API_KEY,  // your DashScope API key/token
  configuration: {
    baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1",
  },
});

// prompt
const prompt = ChatPromptTemplate.fromMessages([
  [
    "system",
    "你是一个全科目的专家, 尽你所能的帮助用户回答问题, 根据用户的语言回答问题",
  ],
  new MessagesPlaceholder("chat_history"), // ! 注意实现多轮对话 需要message占位符来插入历史消息, 请根据实际token 控制对话轮数
  ["human", "{input}"],
]);

console.log("输入exit或q退出");


// build the chain: prompt -> model -> string output parser
const chain = prompt.pipe(model).pipe(new StringOutputParser());


// wrap the chain with message history via the Runnable API
const chainWithHistory = new RunnableWithMessageHistory({
  runnable: chain,
  inputMessagesKey: "input",
  historyMessagesKey: "chat_history",
  getMessageHistory: async (sessionId) => {
    console.log("sessionId:", sessionId);
    const chatHistory = new FileSystemChatMessageHistory({
      sessionId,
      userId: "444", // 根据实际情况实现uuid,默认路径在当前目录 也可修改文件路径 `./chat_history/${sessionId}.json`
    });
    return chatHistory;
  },
});



// create the readline interface
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});

function askQuestion() {
  rl.question('Question: ', async (userContent) => {
    if (userContent.toLowerCase() === "exit" || userContent.toLowerCase() === "q") {
      rl.close();
      return;
    }

    try {
      // pass the user's actual input to the chain
      const resp = await chainWithHistory.invoke(
        { input: userContent },
        { configurable: { sessionId: "langchain-test-session" } }
      );

      console.log('---------------------------');
      // resp is already a string, print it directly
      console.log(resp);
      console.log('---------------------------');

    } catch (error) {
      console.error('Error:', error.message);
    }
    // ask the next question
    askQuestion();
  });
}

// start the question loop
askQuestion();

Python version

python
import uuid

from langchain_core.output_parsers import StrOutputParser
from langchain_community.chat_message_histories import ChatMessageHistory, FileChatMessageHistory
from langchain_core.runnables import RunnableWithMessageHistory

from app.agent.model.model import llm_qwen
from app.agent.prompts.multi_chat_prompt import multi_chat_prompt
from langchain_community.agent_toolkits.file_management import FileManagementToolkit
# from langgraph.prebuilt import create_react_agent
from langchain.agents import create_agent
from app.bailian.banlian_tools import add_tools

def get_session_history(session_id: str):
    # in-memory alternative:
    # if session_id not in store:
    #     store[session_id] = ChatMessageHistory()
    # persist each session to <session_id>.json in the current working directory
    return FileChatMessageHistory(f"{session_id}.json")

file_toolkit = FileManagementToolkit(root_dir=r"D:\code\ai-agent-test\.temp")  # raw string so the backslashes are not treated as escapes
file_tools = file_toolkit.get_tools()

all_tools = file_tools + [add_tools]

agent = create_agent(model=llm_qwen, tools=all_tools)  # note: this agent is not used in the chat loop below


# bind_tools only advertises the tools to the model; this chain does not execute
# tool calls (that would need an agent loop, which is out of scope here)
llm_with_tools = llm_qwen.bind_tools(tools=all_tools)

# build the chain: prompt -> model (with tools bound) -> string output parser
chain = multi_chat_prompt | llm_with_tools | StrOutputParser()




# chat_history = ChatMessageHistory()
# chat_history.add_user_message("I'm Wild Boar Peppa; we're going to build a browser-automation agent (similar to Selenium)")
# chat_history.add_ai_message("")
chain_with_history = RunnableWithMessageHistory(
    runnable=chain,
    get_session_history=get_session_history,
    input_messages_key="question",
    history_messages_key="chat_history",
)

# chat_session_id = uuid.uuid4()
chat_session_id = "1"

print(f"session_id: {chat_session_id}")


while True:
    user_input = input("用户:")
    if user_input.lower() == "exit" or user_input.lower() == "quit":
        break

    print("助理:", end="")
    for chunk in chain_with_history.stream(
        {"question": user_input},
        config={"configurable": {"session_id": chat_session_id}},
    ):
        print(chunk, end="")

    print("\n")