Building a RAG Q&A Chatbot with Streamlit + LangChain
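The script below is a single-file Streamlit app: the user uploads a .txt file, the text is split into chunks with CharacterTextSplitter, embedded with OpenAI's text-embedding-ada-002 model and stored in a Chroma vector store; every chat question then retrieves the most similar chunk and feeds it, together with the question, through a PromptTemplate and an LLMChain backed by gpt-3.5-turbo to produce a grounded answer.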

```python
import os

os.environ["OPENAI_API_KEY"] = ''
os.environ["OPENAI_API_BASE"] = ''

import streamlit as st
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.embeddings.openai import OpenAIEmbeddings

embeddings = OpenAIEmbeddings(model='text-embedding-ada-002')
llm = OpenAI(model_name='gpt-3.5-turbo')
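# Note: 'gpt-3.5-turbo' is a chat model; depending on your LangChain version the OpenAI
# wrapper may emit a warning here and suggest ChatOpenAI from langchain.chat_models instead.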

st.set_page_config(page_title="Chat", page_icon="", layout="centered", initial_sidebar_state="auto", menu_items=None)
# openai.api_key = st.secrets.openai_key
st.title("Chat with AI")

# function for writing uploaded file in temp
def write_text_file(content, file_path):
    try:
        with open(file_path, 'w', encoding='utf-8') as file:
            file.write(content)
        return True
    except Exception as e:
        print(f"Error occurred while writing the file: {e}")
        return False
    

db = None  # holds the Chroma index once a file has been uploaded and processed

uploaded_file = st.file_uploader("Upload an article", type="txt")
if uploaded_file is not None:
    content = uploaded_file.read().decode('utf-8')
    # st.write(content)
    file_path = "temp/file.txt"
    write_text_file(content, file_path)   
    
    loader = TextLoader(file_path)
    docs = loader.load()    
    text_splitter = CharacterTextSplitter(chunk_size=100, chunk_overlap=0)
    texts = text_splitter.split_documents(docs)
    db = Chroma.from_documents(texts, embeddings)    
    st.success("File Loaded Successfully!!")
        
if "messages" not in st.session_state.keys(): # Initialize the chat messages history
    st.session_state.messages = [
        {"role": "assistant", "content": "Ask me anything!"}
    ]


if "chat_engine" not in st.session_state.keys(): # Initialize the chat engine
        st.session_state.chat_engine = None

if question := st.chat_input("Your question"): # Prompt for user input and save to chat history
    st.session_state.messages.append({"role": "user", "content": question})

for message in st.session_state.messages: # Display the prior chat messages
    with st.chat_message(message["role"]):
        st.write(message["content"])

# If last message is not from assistant, generate a new response
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            if db is None:
                response = "Please upload a .txt file first so there is something to search."
            else:
                # retrieve the single most similar chunk (k=1) to use as context
                similar_doc = db.similarity_search(question, k=1)
                context = similar_doc[0].page_content

                # prompt template that grounds the answer in the retrieved context
                prompt_template = """
Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.

{context}

Question: {question}
Answer:
"""
                prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
                query_llm = LLMChain(llm=llm, prompt=prompt)
                response = query_llm.run({"context": context, "question": question})
            st.write(response)
            message = {"role": "assistant", "content": response}
            st.session_state.messages.append(message)  # add the response to the chat history
```
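To try it, save the script (e.g. as app.py, a name assumed here), install streamlit, langchain, openai, chromadb (and usually tiktoken), and launch it with streamlit run app.py; the chat widgets (st.chat_input, st.chat_message) require a reasonably recent Streamlit release. Because Streamlit re-executes the whole script on every interaction, the code above rebuilds the Chroma index on each rerun while the file stays uploaded. A minimal sketch of building the index only once per uploaded file, caching it in st.session_state (the build_index helper is hypothetical and reuses the imports and objects defined above):

```python
# Sketch (an assumption, not part of the original post): cache the Chroma index in
# st.session_state so it is built only once per uploaded file instead of on every rerun.
def build_index(path):
    # mirrors the loading / splitting / embedding steps from the script above
    docs = TextLoader(path).load()
    chunks = CharacterTextSplitter(chunk_size=100, chunk_overlap=0).split_documents(docs)
    return Chroma.from_documents(chunks, embeddings)

if uploaded_file is not None:
    if st.session_state.get("indexed_file") != uploaded_file.name:
        file_path = "temp/file.txt"
        os.makedirs("temp", exist_ok=True)
        write_text_file(uploaded_file.read().decode("utf-8"), file_path)
        st.session_state.db = build_index(file_path)
        st.session_state.indexed_file = uploaded_file.name
    db = st.session_state.db  # the answering code can use this exactly as before
```

Keying the cache on the uploaded file's name means re-uploading a different file triggers a fresh index, while chatting with the same file never re-embeds it.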