build-your-own-rag-chatbot
/
app_4.py
68 lines · 2.2 KB
import streamlit as st

from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnableMap
from langchain_community.vectorstores import AstraDB
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

8# Cache prompt for future runs
9@st.cache_data()
10def load_prompt():
11template = """You're a helpful AI assistent tasked to answer the user's questions.
12You're friendly and you answer extensively with multiple sentences. You prefer to use bulletpoints to summarize.
13
14QUESTION:
15{question}
16
17YOUR ANSWER:"""
18return ChatPromptTemplate.from_messages([("system", template)])
19prompt = load_prompt()
20
21# Cache OpenAI Chat Model for future runs
22@st.cache_resource()
23def load_chat_model():
24return ChatOpenAI(
25temperature=0.3,
26model='gpt-3.5-turbo',
27streaming=True,
28verbose=True
29)
30chat_model = load_chat_model()
31
32# Start with empty messages, stored in session state
33if 'messages' not in st.session_state:
34st.session_state.messages = []
35
36# Draw a title and some markdown
37st.title("Your personal Efficiency Booster")
38st.markdown("""Generative AI is considered to bring the next Industrial Revolution.
39Why? Studies show a **37% efficiency boost** in day to day work activities!""")
40
41# Draw all messages, both user and bot so far (every time the app reruns)
42for message in st.session_state.messages:
43st.chat_message(message['role']).markdown(message['content'])
44
45# Draw the chat input box
46if question := st.chat_input("What's up?"):
47
48# Store the user's question in a session object for redrawing next time
49st.session_state.messages.append({"role": "human", "content": question})
50
51# Draw the user's question
52with st.chat_message('human'):
53st.markdown(question)
54
55# Generate the answer by calling OpenAI's Chat Model
56inputs = RunnableMap({
57'question': lambda x: x['question']
58})
59chain = inputs | prompt | chat_model
60response = chain.invoke({'question': question})
61answer = response.content
62
63# Store the bot's answer in a session object for redrawing next time
64st.session_state.messages.append({"role": "ai", "content": answer})
65
66# Draw the bot's answer
67with st.chat_message('assistant'):
68st.markdown(answer)
69