本文只包含最基础的、请求后端提供的对话接口的示例。大部分模型的传参是差不多的,核心还是如何处理 fetch 获取的流数据。
ts
import { defineStore } from 'pinia';
import { ElMessage } from 'element-plus';
// Chat roles accepted by the backend (OpenAI-style role tags).
type Role = 'system' | 'user' | 'assistant';

// One turn of the conversation; sent to the backend as part of `history`.
export interface Message {
  role: Role;
  content: string;
}
// Shape of the Pinia state for the chat store.
interface ChatStore {
  // Backend model identifier, sent as `model_name` in the request body.
  model: 'Gnosis' | 'chatglm2-6b' | 'cc-13b-chat';
  // Endpoint path of the knowledge-base chat API.
  chatApi: string;
  // Full conversation history, seeded with the system prompt.
  messages: Message[];
  // Streaming tri-state: null = idle, false = stream in progress,
  // true = reader reported done (reset to null when the action finishes).
  done: boolean | null;
}
const DEFAULT_PROMPT = `你应该根据用户回答生成答案。答案必须尽可能简明扼要。字数控制在512个字符以内`;
export const useChatStore = defineStore({
  id: 'chatStore',
  state: (): ChatStore => {
    return {
      model: 'cc-13b-chat',
      chatApi: 'chat_api/chat/knowledge_base_chat',
      // Conversation history, seeded with the system prompt.
      messages: [
        {
          role: 'system',
          content: DEFAULT_PROMPT,
        },
      ],
      // null = idle, false = streaming, true = reader done.
      done: null,
    };
  },
  actions: {
    /**
     * POST `query` to the chat backend and stream the answer into a freshly
     * pushed assistant message.
     *
     * Fixes over the previous version:
     * - checks `response.ok` — fetch only rejects on network failure, so
     *   HTTP 4xx/5xx used to fall through into body reading;
     * - decodes with `{ stream: true }` so multi-byte UTF-8 characters split
     *   across chunks are not corrupted;
     * - buffers partial data so `JSON.parse` only sees complete
     *   newline-delimited objects (a chunk may carry several objects or a
     *   fragment of one; a parse error used to escape the loop and leave
     *   `done` stuck at `false`);
     * - releases the reader lock in `finally`.
     */
    async fetchChat(query: string) {
      this.messages.push({
        role: 'user',
        content: query,
      });
      let response: Response | null = null;
      try {
        response = await fetch(this.chatApi, {
          headers: {
            'Content-Type': 'application/json',
          },
          method: 'POST',
          body: JSON.stringify({
            query: query,
            history: this.messages,
            model_name: this.model,
            stream: true,
            knowledge_base_name: 'lb_test',
            top_k: 1,
            score_threshold: 1,
            temperature: 0.7,
            max_tokens: 4096,
            prompt_name: 'default',
          }),
        });
      } catch (error) {
        console.log(error);
        ElMessage.error('请求失败');
        return;
      }
      // fetch resolves on HTTP error statuses; treat those as failures too.
      if (!response.ok) {
        ElMessage.error('请求失败');
        return;
      }
      const data = response.body;
      this.messages.push({
        role: 'assistant',
        content: '',
      });
      // Consume the streamed body. Each logical unit is a JSON object like
      // {"answer": "帮助解决"}; append every `answer` fragment to the reply.
      if (data) {
        const reader = data.getReader();
        const decoder = new TextDecoder('utf-8');
        let buffer = '';
        // Parse one complete JSON payload and append its answer.
        // Returns false when `raw` is not (yet) valid JSON so the caller
        // can keep it buffered until more bytes arrive.
        const tryConsume = (raw: string): boolean => {
          const text = raw.trim();
          if (!text) return true;
          try {
            const res = JSON.parse(text);
            this.messages[this.messages.length - 1].content += res.answer ?? '';
            return true;
          } catch {
            return false;
          }
        };
        this.done = false;
        try {
          while (!this.done) {
            const { value, done: readerDone } = await reader.read();
            if (value) {
              // stream:true keeps incomplete multi-byte sequences pending
              // inside the decoder instead of emitting replacement chars.
              buffer += decoder.decode(value, { stream: true });
              const lines = buffer.split('\n');
              // The last element may be a partial line; keep it buffered.
              buffer = lines.pop() ?? '';
              for (const line of lines) {
                if (!tryConsume(line)) {
                  console.log('skipped malformed line:', line);
                }
              }
              // Chunks may not be newline-terminated; try the remainder as-is.
              if (tryConsume(buffer)) {
                buffer = '';
              }
            }
            this.done = readerDone;
          }
          // Flush whatever is still pending in the decoder / line buffer.
          buffer += decoder.decode();
          if (!tryConsume(buffer)) {
            console.log('unparsed stream tail:', buffer);
          }
        } finally {
          reader.releaseLock();
        }
      }
      // Conversation finished — reset the streaming flag to idle.
      this.done = null;
    },
  },
  // persist: {
  //   key: 'store',
  //   storage: window.localStorage,
  // },
});