-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy path6-docqa.py
More file actions
79 lines (63 loc) · 1.72 KB
/
6-docqa.py
File metadata and controls
79 lines (63 loc) · 1.72 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
from dotenv import load_dotenv, find_dotenv
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain, RetrievalQA
from langchain.document_loaders import PyPDFLoader
from langchain.vectorstores import FAISS
from langchain.embeddings.openai import OpenAIEmbeddings
import chainlit as cl
"""
Load OpenAI API key
"""
_ = load_dotenv(find_dotenv())
pdf_filepath = "Peraturan Menteri Pendidikan dan Kebudayaan Nomor 14 Tahun 2020.pdf"
# PDF parser
loader = PyPDFLoader(pdf_filepath)
pages = loader.load_and_split()
# Create vector store DB
embeddings = OpenAIEmbeddings()
db = FAISS.from_documents(pages, embeddings)
# Initialize chat: runs once when a new Chainlit chat session starts.
@cl.on_chat_start
def init():
    """Build the retrieval-QA chain for this session and stash it.

    Combines a streaming ChatOpenAI model with a retriever backed by the
    module-level FAISS index (``db``), and stores the resulting chain in
    the Chainlit user session so the message handler can reuse it.
    """
    chat_llm = ChatOpenAI(
        temperature=0.3,  # low temperature: favor factual, consistent answers
        streaming=True,   # stream tokens back to the Chainlit UI
    )

    # Retrieval-augmented QA chain over the indexed PDF pages.
    chain = RetrievalQA.from_llm(
        llm=chat_llm,
        retriever=db.as_retriever(),
    )

    # Persist the chain for the lifetime of this chat session.
    cl.user_session.set("chain", chain)
@cl.on_message
async def main(message: str):
    """Answer one incoming chat message via the session's retrieval-QA chain."""
    # Fetch the chain that init() stored for this session.
    qa_chain = cl.user_session.get("chain")

    # Run the chain asynchronously; the callback handler relays the
    # intermediate LangChain steps/tokens to the Chainlit UI.
    outputs = await qa_chain.acall(
        message,
        callbacks=[cl.AsyncLangchainCallbackHandler()],
    )

    # RetrievalQA exposes the final answer under the "result" key.
    await cl.Message(content=outputs["result"]).send()