from langchain.document_loaders import OnlinePDFLoader
from langchain.vectorstores import Chroma
from langchain.embeddings import GPT4AllEmbeddings
from langchain import PromptTemplate
from langchain.llms import Ollama
from langchain.callbacks.manager import CallbackManager
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chains import RetrievalQA
from langchain.text_splitter import RecursiveCharacterTextSplitter
import sys
import os

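# context manager that temporarily redirects stdout/stderr to os.devnull,
# used below to silence noisy library output while the vector store is built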
class SuppressStdout:
    def __enter__(self):
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        sys.stdout = open(os.devnull, 'w')
        sys.stderr = open(os.devnull, 'w')

    def __exit__(self, exc_type, exc_val, exc_tb):
        # close both devnull handles before restoring the real streams
        sys.stdout.close()
        sys.stderr.close()
        sys.stdout = self._original_stdout
        sys.stderr = self._original_stderr

# load the PDF and split it into chunks
loader = OnlinePDFLoader("https://d18rn0p25nwr6d.cloudfront.net/CIK-0001813756/975b3e9b-268e-4798-a9e4-2a9a7c92dc10.pdf")
data = loader.load()

text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
all_splits = text_splitter.split_documents(data)

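# embed the chunks with GPT4All embeddings and index them in an in-memory
# Chroma vector store, suppressing noisy library output during creation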
with SuppressStdout():
    vectorstore = Chroma.from_documents(documents=all_splits, embedding=GPT4AllEmbeddings())

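# interactive loop: read a question, retrieve relevant chunks, and answer;
# type "exit" to quit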
while True:
    query = input("\nQuery: ")
    if query == "exit":
        break
    if query.strip() == "":
        continue

    # prompt template that grounds the answer in the retrieved context
    template = """Use the following pieces of context to answer the question at the end.
    If you don't know the answer, just say that you don't know, don't try to make up an answer.
    Use three sentences maximum and keep the answer as concise as possible.
    {context}
    Question: {question}
    Helpful Answer:"""
    QA_CHAIN_PROMPT = PromptTemplate(
        input_variables=["context", "question"],
        template=template,
    )

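    # build the model and retrieval chain; the callback streams tokens to
    # stdout as they are generated (rebuilding these on every iteration works,
    # though they could be hoisted above the loop)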
    llm = Ollama(model="llama2:13b", callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))
    qa_chain = RetrievalQA.from_chain_type(
        llm,
        retriever=vectorstore.as_retriever(),
        chain_type_kwargs={"prompt": QA_CHAIN_PROMPT},
    )

    result = qa_chain({"query": query})
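# Usage sketch (assumptions, not stated in this file: a local Ollama server
# with the llama2:13b model pulled, plus the Python deps -- langchain,
# chromadb, gpt4all, and the unstructured/PDF extras used by OnlinePDFLoader):
#   ollama pull llama2:13b
#   python main.py   # "main.py" is a placeholder for this script's filename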
