Building a RAG Q&A Chatbot with Streamlit + LangChain

The single-file Streamlit app below lets the user upload a .txt article; the app splits it into chunks, embeds the chunks into a Chroma vector store, retrieves the most similar chunk for each chat question, and answers from that context with an OpenAI model through an LLMChain.

```python
import os

os.environ["OPENAI_API_KEY"] = ''
os.environ["OPENAI_API_BASE"] = ''

import streamlit as st
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.embeddings.openai import OpenAIEmbeddings

# embedding model used to index the uploaded document
embeddings = OpenAIEmbeddings(
    model='text-embedding-ada-002'
)
# LLM used to answer questions; gpt-3.5-turbo is a chat model, so newer
# LangChain versions expect ChatOpenAI here rather than the OpenAI wrapper
llm = OpenAI(
    model_name='gpt-3.5-turbo'
)

st.set_page_config(page_title="Chat", page_icon="", layout="centered", initial_sidebar_state="auto", menu_items=None)
# openai.api_key = st.secrets.openai_key
st.title("Chat with AI")

# function for writing uploaded file in temp
def write_text_file(content, file_path):
    try:
        # write as UTF-8 to match the decoding done on upload
        with open(file_path, 'w', encoding='utf-8') as file:
            file.write(content)
        return True
    except Exception as e:
        print(f"Error occurred while writing the file: {e}")
        return False

uploaded_file = st.file_uploader("Upload an article", type="txt")
if uploaded_file is not None:
    content = uploaded_file.read().decode('utf-8')
    # st.write(content)
    file_path = "temp/file.txt"
    os.makedirs("temp", exist_ok=True)  # make sure the temp directory exists
    write_text_file(content, file_path)

    # split the document, embed it, and keep the Chroma store in session_state
    # so the answer step below can check whether a document has been indexed
    loader = TextLoader(file_path, encoding='utf-8')
    docs = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=100, chunk_overlap=0)
    texts = text_splitter.split_documents(docs)
    st.session_state.db = Chroma.from_documents(texts, embeddings)
    st.success("File Loaded Successfully!!")
        
if "messages" not in st.session_state.keys(): # Initialize the chat messages history
    st.session_state.messages = [
        {"role": "assistant", "content": "Ask me anything!"}
    ]


if "chat_engine" not in st.session_state.keys(): # Initialize the chat engine
        st.session_state.chat_engine = None

if question := st.chat_input("Your question"): # Prompt for user input and save to chat history
    st.session_state.messages.append({"role": "user", "content": question})

for message in st.session_state.messages: # Display the prior chat messages
    with st.chat_message(message["role"]):
        st.write(message["content"])

# If the last message is not from the assistant, generate a new response
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        if "db" not in st.session_state:
            # no document has been indexed yet, so there is nothing to retrieve from
            response = "Please upload a .txt article first so I have something to answer from."
            st.write(response)
        else:
            with st.spinner("Thinking..."):
                # retrieve the single most similar chunk and use it as context
                similar_doc = st.session_state.db.similarity_search(question, k=1)
                context = similar_doc[0].page_content

                # set prompt template
                prompt_template = """
Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.

{context}

Question: {question}
Answer:
"""
                prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
                query_llm = LLMChain(llm=llm, prompt=prompt)
                response = query_llm.run({"context": context, "question": question})
                st.write(response)
        message = {"role": "assistant", "content": response}
        st.session_state.messages.append(message)  # Add response to message history
```
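
The script wires retrieval and prompting together by hand: it looks up the single most similar chunk and pastes it into an LLMChain prompt. The same step could also be expressed with LangChain's built-in RetrievalQA chain, which retrieves the top-k chunks and stuffs them into a prompt for you. A minimal sketch, assuming the `llm` object and the Chroma store (`st.session_state.db`) from the script above; this is an illustration, not a drop-in replacement:

```python
from langchain.chains import RetrievalQA

# the Chroma store is built when a file is uploaded (see the script above)
db = st.session_state.db

# "stuff" concatenates the retrieved chunks into a single prompt for the LLM
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=db.as_retriever(search_kwargs={"k": 1}),  # k=1 mirrors the manual version
)

# `question` is the user's chat input, as in the app
response = qa_chain.run(question)
```

To try the app locally, save the script (as app.py, for example) and start it with `streamlit run app.py`. Streamlit reruns the whole file on every interaction, which is why the chat history and the vector store are kept in st.session_state.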