Building a RAG Q&A Chatbot with Streamlit + LangChain
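
This is a minimal retrieval-augmented generation (RAG) chatbot: you upload a .txt file, the text is split into chunks and embedded into a Chroma vector store, and each question is answered by the LLM using the most similar chunk as context, all wrapped in Streamlit's chat UI. You'll likely need streamlit, langchain, openai, chromadb and tiktoken installed (the exact set depends on your LangChain version), and the app is started with streamlit run followed by whatever file name you save the script under.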

The complete application code (Python):
import os

os.environ["OPENAI_API_KEY"] = ''
os.environ["OPENAI_API_BASE"] = ''

import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.embeddings.openai import OpenAIEmbeddings

embeddings = OpenAIEmbeddings(model='text-embedding-ada-002')
# gpt-3.5-turbo is a chat model, so ChatOpenAI is the right wrapper (not the completion-style OpenAI class)
llm = ChatOpenAI(model_name='gpt-3.5-turbo')

st.set_page_config(page_title="Chat", page_icon="", layout="centered", initial_sidebar_state="auto", menu_items=None)
# openai.api_key = st.secrets.openai_key
st.title("Chat with AI")

# function for writing uploaded file in temp
def write_text_file(content, file_path):
    try:
        with open(file_path, 'w', encoding='utf-8') as file:  # utf-8 so the decoded upload round-trips on any OS
            file.write(content)
        return True
    except Exception as e:
        print(f"Error occurred while writing the file: {e}")
        return False
    

db = None  # the vector store only exists after a document has been uploaded
uploaded_file = st.file_uploader("Upload an article", type="txt")
if uploaded_file is not None:
    content = uploaded_file.read().decode('utf-8')
    # st.write(content)
    os.makedirs("temp", exist_ok=True)  # make sure the temp directory exists
    file_path = "temp/file.txt"
    write_text_file(content, file_path)

    # load the saved file, split it into chunks, and index the chunks in a Chroma vector store
    loader = TextLoader(file_path)
    docs = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=100, chunk_overlap=0)
    texts = text_splitter.split_documents(docs)
    db = Chroma.from_documents(texts, embeddings)
    st.success("File Loaded Successfully!!")
        
if "messages" not in st.session_state.keys(): # Initialize the chat messages history
    st.session_state.messages = [
        {"role": "assistant", "content": "Ask me anything!"}
    ]


if "chat_engine" not in st.session_state.keys(): # Initialize the chat engine
        st.session_state.chat_engine = None

if question := st.chat_input("Your question"): # Prompt for user input and save to chat history
    st.session_state.messages.append({"role": "user", "content": question})

for message in st.session_state.messages: # Display the prior chat messages
    with st.chat_message(message["role"]):
        st.write(message["content"])

# If the last message is not from the assistant, generate a new response
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            if db is None:
                response = "Please upload a document first so I have some context to answer from."
            else:
                # retrieve the single most similar chunk and use it as context for the LLM
                similar_doc = db.similarity_search(question, k=1)
                context = similar_doc[0].page_content

                # prompt template: answer strictly from the retrieved context
                prompt_template = """
Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.

{context}

Question: {question}
Answer:
"""
                prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
                query_llm = LLMChain(llm=llm, prompt=prompt)
                response = query_llm.run({"context": context, "question": question})
            st.write(response)
            message = {"role": "assistant", "content": response}
            st.session_state.messages.append(message)  # add the response to the chat history
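
For reference, the manual similarity_search + LLMChain steps above can also be collapsed into LangChain's RetrievalQA chain. The sketch below is an optional alternative, not part of the original app: it assumes the same db, llm and prompt objects defined above and the same classic (pre-0.1) LangChain API the rest of the code uses.

from langchain.chains import RetrievalQA

# retriever that returns the single most similar chunk (same k=1 as above)
retriever = db.as_retriever(search_kwargs={"k": 1})

# "stuff" inserts the retrieved chunk(s) into the prompt's {context} slot
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=retriever,
    chain_type_kwargs={"prompt": prompt},  # reuse the same PromptTemplate
)

response = qa_chain.run(question)

If you go this route, the prompt must keep the {context} and {question} input variables, which is exactly what the template above already uses.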