Preface
This one is partly for fun and partly to share some tech. As for what exactly the fun part is, those who know, know. Gentlemen, stay strong and take care of yourselves! First, take a look at what the finished result looks like. For the model, we're using a domestic LLM: the large language model from Moonshot. Without further ado, let's get started.
MoonShot
Now let's get a Moonshot API key. Head to the developer platform: platform.moonshot.cn/console/inf... You may wonder why this particular LLM. In my experience its Chinese output is quite good and it can handle fairly complex tasks, compared with GPT-3.5 and other models, while the pricing stays in an acceptable range. Of course, the API relay we use later can also call GPT-4.0 directly, but the cost goes up considerably. Once you're on the platform, just follow the prompts to create a key. Note that free accounts come with 15 RMB worth of tokens but are subject to concurrency limits, so consider enabling a paid plan if you need higher concurrency.
Writing the Prompt
First, let's write the prompt. This part is very simple:
python
# initialize the chatbot configuration
api_key = "sk-FGivAMvTnxPSWlp7HrGfDD"
openai_api_base = "https://api.moonshot.cn/v1"
system_prompt = "你是跨国婚姻法律小助手,小汐,负责回答用户关于跨国婚姻的问题。你的回答要清晰明了,有逻辑性和条理性。请使用中文回答。"
default_model = "moonshot-v1-8k"
temperature = 0.5
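Hard-coding the key as above is fine for a quick demo. If you prefer not to keep it in the source file, you can read it from an environment variable instead; the sketch below assumes a variable named MOONSHOT_API_KEY, which is just a name chosen for this example:
python
import os

# Hypothetical variable name for this example; set it before running:
#   export MOONSHOT_API_KEY=sk-xxxx
api_key = os.getenv("MOONSHOT_API_KEY", "")
if not api_key:
    raise RuntimeError("MOONSHOT_API_KEY is not set")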
Hooking Up the Model
Writing the prompt alone is not enough; we still need to hook up the model. Since the Moonshot API follows the OpenAI convention, we can simply use the OpenAI library.
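Before wiring up the full chatbot class, a quick sanity check helps. The minimal sketch below sends a single chat completion to the Moonshot endpoint through the OpenAI SDK, reusing the system_prompt defined above; the API key placeholder and the question string are just examples:
python
from openai import OpenAI

# substitute your own Moonshot API key here
client = OpenAI(api_key="sk-xxxx", base_url="https://api.moonshot.cn/v1")

completion = client.chat.completions.create(
    model="moonshot-v1-8k",
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": "在中国办理跨国婚姻登记需要哪些材料?"},  # example question
    ],
    temperature=0.5,
)
print(completion.choices[0].message.content)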
Then take a look at the code below:
python
import time
from openai import OpenAI

client = OpenAI(api_key=api_key, base_url=openai_api_base)
class ChatBotHandler(object):
def __init__(self, bot_name="chat"):
self.bot_name = bot_name
self.current_message = None
def user_stream(self,user_message, history):
self.current_message = user_message
return "", history + [[user_message, None]]
def bot_stream(self,history):
if(len(history)==0):
history.append([self.current_message,None])
bot_message = self.getResponse(history[-1][0],history)
history[-1][1] = ""
for character in bot_message:
history[-1][1] += character
time.sleep(0.02)
yield history
def signChat(self,history):
history_openai_format = []
        # first, add the system message
history_openai_format.append(
{"role": "system",
"content": system_prompt
},
)
        # then append the conversation history
history_openai_format.extend(history)
# print(history_openai_format)
completion = client.chat.completions.create(
model=default_model,
messages=history_openai_format,
temperature=temperature,
)
result = completion.choices[0].message.content
return result
    def getResponse(self,message,history):
        history_openai_format = []
        # add the system prompt once, before replaying the conversation
        # (appending it inside the loop would duplicate it for every turn)
        history_openai_format.append(
            {"role": "system",
             "content": system_prompt
             },
        )
        for human, assistant in history:
if(human!=None):
history_openai_format.append({"role": "user", "content": human})
if(assistant!=None):
history_openai_format.append({"role": "assistant", "content": assistant})
completion = client.chat.completions.create(
model=default_model,
messages=history_openai_format,
temperature=temperature,
)
result = completion.choices[0].message.content
return result
    def chat(self,message, history):
        history_openai_format = []
        for human, assistant in history:
            history_openai_format.append({"role": "user", "content": human})
            # assistant replies use the "assistant" role, not "system"
            history_openai_format.append({"role": "assistant", "content": assistant})
        history_openai_format.append({"role": "user", "content": message})
response = client.chat.completions.create(model=default_model,
messages=history_openai_format,
temperature=1.0,
stream=True)
partial_message = ""
for chunk in response:
if chunk.choices[0].delta.content is not None:
partial_message = partial_message + chunk.choices[0].delta.content
yield partial_message
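As a quick check that the handler works on its own (assuming the config and client defined above are in scope), you can drive the streaming chat method directly from a script; the question string here is only an example:
python
bot = ChatBotHandler()
history = []  # list of [user_message, assistant_reply] pairs

# chat() yields the progressively assembled reply, chunk by chunk
reply = ""
for partial in bot.chat("跨国婚姻在中国如何登记?", history):
    reply = partial
print(reply)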
Building the WebUI
After that, all that's left is to provide a web UI. Here we go straight to Streamlit.
python
import time
import streamlit as st

class AssistantNovel(object):
def __init__(self):
self.chat = ChatBotHandler()
def get_response(self,prompt, history):
return self.chat.signChat(history)
def clear_chat_history(self):
st.session_state.messages = [{"role": "assistant", "content": "🍭🍡你好!我是跨国婚姻小助手,您可以咨询我关于这方面的任何法律问题🧐"}]
def chat_fn(self):
prompt = st.session_state.get("prompt-input")
st.session_state.messages.append({"role": "user", "content": prompt})
        # now generate the assistant reply
with self.con:
with st.spinner("Thinking..."):
try:
response = self.get_response(prompt, st.session_state.messages)
except Exception as e:
print(e)
response = "哦┗|`O′|┛ 嗷~~,出错了,您的请求太频繁,请稍后再试!😥"
message = {"role": "assistant", "content": response}
st.session_state.messages.append(message)
def page(self):
if "messages" not in st.session_state.keys():
st.session_state.messages = [{"role": "assistant", "content": "🍭🍡你好!我是跨国婚姻小助手,您可以咨询我关于这方面的任何法律问题🧐"}]
        # replay the chat history; the latest message gets a typewriter-style animation
for message in st.session_state.messages:
if message != st.session_state.messages[-1]:
with st.chat_message(message["role"]):
st.write(message["content"])
else:
placeholder = st.empty()
full_response = ''
for item in message["content"]:
full_response += item
time.sleep(0.01)
placeholder.markdown(full_response)
placeholder.markdown(full_response)
        # main chat window
self.con = st.container()
with self.con:
prompt = st.chat_input(placeholder="请输入对话",key="prompt-input",on_submit=self.chat_fn)
st.button('清空历史对话', on_click=self.clear_chat_history)
Complete Code
Okay, finally, here is the complete code:
python
"""
@FileName:layer.py
@Author:Huterox
@Description:Go For It
@Time:2024/5/5 13:49
@Copyright:©2018-2024 awesome!
"""
# import third-party modules
import time
import streamlit as st
from openai import OpenAI
# imports finished
# initialize the chatbot configuration
api_key = "sk-FGivAMvdHnrqUwzZp29mD"
openai_api_base = "https://api.moonshot.cn/v1"
system_prompt = "你是跨国婚姻法律小助手,小汐,负责回答用户关于跨国婚姻的问题。你的回答要清晰明了,有逻辑性和条理性。请使用中文回答。"
default_model = "moonshot-v1-8k"
temperature = 0.5
client = OpenAI(api_key=api_key,base_url=openai_api_base)
class ChatBotHandler(object):
def __init__(self, bot_name="chat"):
self.bot_name = bot_name
self.current_message = None
def user_stream(self,user_message, history):
self.current_message = user_message
return "", history + [[user_message, None]]
def bot_stream(self,history):
if(len(history)==0):
history.append([self.current_message,None])
bot_message = self.getResponse(history[-1][0],history)
history[-1][1] = ""
for character in bot_message:
history[-1][1] += character
time.sleep(0.02)
yield history
def signChat(self,history):
history_openai_format = []
        # first, add the system message
history_openai_format.append(
{"role": "system",
"content": system_prompt
},
)
        # then append the conversation history
history_openai_format.extend(history)
# print(history_openai_format)
completion = client.chat.completions.create(
model=default_model,
messages=history_openai_format,
temperature=temperature,
)
result = completion.choices[0].message.content
return result
    def getResponse(self,message,history):
        history_openai_format = []
        # add the system prompt once, before replaying the conversation
        # (appending it inside the loop would duplicate it for every turn)
        history_openai_format.append(
            {"role": "system",
             "content": system_prompt
             },
        )
        for human, assistant in history:
if(human!=None):
history_openai_format.append({"role": "user", "content": human})
if(assistant!=None):
history_openai_format.append({"role": "assistant", "content": assistant})
completion = client.chat.completions.create(
model=default_model,
messages=history_openai_format,
temperature=temperature,
)
result = completion.choices[0].message.content
return result
    def chat(self,message, history):
        history_openai_format = []
        for human, assistant in history:
            history_openai_format.append({"role": "user", "content": human})
            # assistant replies use the "assistant" role, not "system"
            history_openai_format.append({"role": "assistant", "content": assistant})
        history_openai_format.append({"role": "user", "content": message})
response = client.chat.completions.create(model=default_model,
messages=history_openai_format,
temperature=1.0,
stream=True)
partial_message = ""
for chunk in response:
if chunk.choices[0].delta.content is not None:
partial_message = partial_message + chunk.choices[0].delta.content
yield partial_message
class AssistantNovel(object):
def __init__(self):
self.chat = ChatBotHandler()
def get_response(self,prompt, history):
return self.chat.signChat(history)
def clear_chat_history(self):
st.session_state.messages = [{"role": "assistant", "content": "🍭🍡你好!我是跨国婚姻小助手,您可以咨询我关于这方面的任何法律问题🧐"}]
def chat_fn(self):
prompt = st.session_state.get("prompt-input")
st.session_state.messages.append({"role": "user", "content": prompt})
        # now generate the assistant reply
with self.con:
with st.spinner("Thinking..."):
try:
response = self.get_response(prompt, st.session_state.messages)
except Exception as e:
print(e)
response = "哦┗|`O′|┛ 嗷~~,出错了,您的请求太频繁,请稍后再试!😥"
message = {"role": "assistant", "content": response}
st.session_state.messages.append(message)
def page(self):
if "messages" not in st.session_state.keys():
st.session_state.messages = [{"role": "assistant", "content": "🍭🍡你好!我是跨国婚姻小助手,您可以咨询我关于这方面的任何法律问题🧐"}]
        # replay the chat history; the latest message gets a typewriter-style animation
for message in st.session_state.messages:
if message != st.session_state.messages[-1]:
with st.chat_message(message["role"]):
st.write(message["content"])
else:
placeholder = st.empty()
full_response = ''
for item in message["content"]:
full_response += item
time.sleep(0.01)
placeholder.markdown(full_response)
placeholder.markdown(full_response)
        # main chat window
self.con = st.container()
with self.con:
prompt = st.chat_input(placeholder="请输入对话",key="prompt-input",on_submit=self.chat_fn)
st.button('清空历史对话', on_click=self.clear_chat_history)
if __name__ == '__main__':
st.set_page_config(page_title="跨国婚姻法律小助手",
page_icon="🤖",
layout="wide",
initial_sidebar_state="auto",
)
a,b,c = st.columns([1,2,1])
with b:
assistant = AssistantNovel()
assistant.page()
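To launch the app, save the file as layer.py (the name given in the header docstring) and start it with Streamlit from the command line:
streamlit run layer.py
Streamlit then serves the chat page at a local URL (by default http://localhost:8501), with the conversation centered by the three-column layout above.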