build-your-own-rag-chatbot

import streamlit as st
from langchain_openai import OpenAIEmbeddings
from langchain_openai import ChatOpenAI
from langchain_community.vectorstores import AstraDB
from langchain.schema.runnable import RunnableMap
from langchain.prompts import ChatPromptTemplate

# Cache prompt for future runs
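# Note: st.cache_data caches serializable return values such as the prompt
# template, while st.cache_resource (used below) is meant for unserializable
# resources like model clients and database connections.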
@st.cache_data()
def load_prompt():
    template = """You're a helpful AI assistant tasked to answer the user's questions.
You're friendly and you answer extensively with multiple sentences. You prefer to use bulletpoints to summarize.

CONTEXT:
{context}

QUESTION:
{question}

YOUR ANSWER:"""
    return ChatPromptTemplate.from_messages([("system", template)])
prompt = load_prompt()

# Cache OpenAI Chat Model for future runs
@st.cache_resource()
def load_chat_model():
    return ChatOpenAI(
        temperature=0.3,
        model='gpt-3.5-turbo',
        streaming=True,
        verbose=True
    )
chat_model = load_chat_model()

# Cache the Astra DB Vector Store for future runs
@st.cache_resource(show_spinner='Connecting to Astra')
def load_retriever():
    # Connect to the Vector Store
    vector_store = AstraDB(
        embedding=OpenAIEmbeddings(),
        collection_name="my_store",
        api_endpoint=st.secrets['ASTRA_API_ENDPOINT'],
        token=st.secrets['ASTRA_TOKEN']
    )

    # Get the retriever for the Chat Model
    retriever = vector_store.as_retriever(
        search_kwargs={"k": 5}
    )
    return retriever
retriever = load_retriever()

# Start with empty messages, stored in session state
if 'messages' not in st.session_state:
    st.session_state.messages = []

# Draw a title and some markdown
st.title("Your personal Efficiency Booster")
st.markdown("""Generative AI is considered to bring the next Industrial Revolution.  
Why? Studies show a **37% efficiency boost** in day to day work activities!""")

# Draw all messages, both user and bot so far (every time the app reruns)
for message in st.session_state.messages:
    st.chat_message(message['role']).markdown(message['content'])

# Draw the chat input box
if question := st.chat_input("What's up?"):
    
    # Store the user's question in a session object for redrawing next time
    st.session_state.messages.append({"role": "human", "content": question})

    # Draw the user's question
    with st.chat_message('human'):
        st.markdown(question)

    # Generate the answer by calling OpenAI's Chat Model
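    # The RunnableMap below builds the prompt inputs: 'context' is filled with
    # the top-5 documents the retriever returns for the question, while
    # 'question' is passed through unchanged; the map is then piped into the
    # prompt and the chat model.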
    inputs = RunnableMap({
        'context': lambda x: retriever.get_relevant_documents(x['question']),
        'question': lambda x: x['question']
    })
    chain = inputs | prompt | chat_model
    response = chain.invoke({'question': question})
    answer = response.content

    # Store the bot's answer in a session object for redrawing next time
    st.session_state.messages.append({"role": "ai", "content": answer})

    # Draw the bot's answer
    with st.chat_message('assistant'):
        st.markdown(answer)


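Note: the app reads its credentials from Streamlit secrets. A minimal sketch of .streamlit/secrets.toml, assuming the two key names read in the code above; OPENAI_API_KEY is an extra assumption (it is not referenced in this file, but langchain_openai falls back to it by default), and all values are placeholders:

OPENAI_API_KEY = "sk-..."           # placeholder OpenAI API key
ASTRA_API_ENDPOINT = "https://..."  # placeholder Astra DB API endpoint
ASTRA_TOKEN = "AstraCS:..."         # placeholder Astra DB application token

With the secrets in place, start the app with "streamlit run <your-app-file>.py" (the actual filename is not shown on this page).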