# build-your-own-rag-chatbot / app_5.py
import streamlit as st

from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnableMap
from langchain_community.vectorstores import AstraDB
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
6
# Cache prompt for future runs
@st.cache_data()
def load_prompt():
    """Build the chat prompt template used to answer questions from RAG context.

    Returns:
        ChatPromptTemplate with a single system message exposing two input
        variables: ``context`` (retrieved documents) and ``question``.
    """
    # Fixed typo in the prompt text: "assistent" -> "assistant".
    template = """You're a helpful AI assistant tasked to answer the user's questions.
You're friendly and you answer extensively with multiple sentences. You prefer to use bulletpoints to summarize.

CONTEXT:
{context}

QUESTION:
{question}

YOUR ANSWER:"""
    return ChatPromptTemplate.from_messages([("system", template)])
prompt = load_prompt()
22
# Cache OpenAI Chat Model for future runs
@st.cache_resource()
def load_chat_model():
    """Return a cached GPT-3.5 chat model (low temperature, streaming on)."""
    settings = dict(
        model='gpt-3.5-turbo',
        temperature=0.3,   # keep answers mostly deterministic
        streaming=True,    # emit tokens incrementally
        verbose=True,
    )
    return ChatOpenAI(**settings)
chat_model = load_chat_model()
33
# Cache the Astra DB Vector Store for future runs
@st.cache_resource(show_spinner='Connecting to Astra')
def load_retriever():
    """Connect to the Astra DB vector store and return a retriever over it.

    Credentials come from Streamlit secrets (ASTRA_API_ENDPOINT, ASTRA_TOKEN).
    """
    store = AstraDB(
        embedding=OpenAIEmbeddings(),
        collection_name="my_store",
        api_endpoint=st.secrets['ASTRA_API_ENDPOINT'],
        token=st.secrets['ASTRA_TOKEN'],
    )
    # k=5: number of context documents handed to the prompt per question
    return store.as_retriever(search_kwargs={"k": 5})
retriever = load_retriever()
51
# Start with empty messages, stored in session state
if 'messages' not in st.session_state:
    st.session_state.messages = []

# Draw a title and some markdown
st.title("Your personal Efficiency Booster")
st.markdown("""Generative AI is considered to bring the next Industrial Revolution.
Why? Studies show a **37% efficiency boost** in day to day work activities!""")

# Draw all messages, both user and bot so far (every time the app reruns)
for message in st.session_state.messages:
    st.chat_message(message['role']).markdown(message['content'])

# Draw the chat input box
if question := st.chat_input("What's up?"):

    # Store the user's question in a session object for redrawing next time
    st.session_state.messages.append({"role": "human", "content": question})

    # Draw the user's question
    with st.chat_message('human'):
        st.markdown(question)

    # Generate the answer by calling OpenAI's Chat Model:
    # 'context' is filled from the vector-store retriever, 'question' is
    # passed through unchanged; both feed the cached prompt template.
    inputs = RunnableMap({
        'context': lambda x: retriever.get_relevant_documents(x['question']),
        'question': lambda x: x['question']
    })
    chain = inputs | prompt | chat_model
    response = chain.invoke({'question': question})
    answer = response.content

    # Store the bot's answer in a session object for redrawing next time
    st.session_state.messages.append({"role": "ai", "content": answer})

    # Draw the bot's answer
    with st.chat_message('assistant'):
        st.markdown(answer)