-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchatPDF_Mistral7B.py
More file actions
27 lines (22 loc) · 1.2 KB
/
chatPDF_Mistral7B.py
File metadata and controls
27 lines (22 loc) · 1.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
from RAG.storing import VectorDatabaseStore
from RAG.loading import VectorDatabaseLoad
from RAG.query import QueryEngineManager
import os
# ---------------------------------------------------------------------------
# Configuration
# ---------------------------------------------------------------------------
# PDF document to be embedded into the vector database.
pdf_file_path = ("Source_Documents/A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language "
                 "Models.pdf")
# Question posed to the query engine over the indexed document.
prompt = "give me the summary of hallucination mitigation techniques in Large language Models"
# The vector store requires an OpenAI API key to be set even when a local LLM
# is used. Use setdefault so a real key already exported in the user's
# environment is never clobbered by this placeholder value.
os.environ.setdefault("OPENAI_API_KEY", "Type your API here")
if __name__ == "__main__":
    # Toggle: set True once the vector database has already been built and
    # persisted, so the (slow) embedding + storage step can be skipped.
    data_available = False

    if data_available:
        # Reuse the previously persisted vector database.
        # NOTE(review): load_VectorDB also returns base_nodes, which suggests
        # the source PDF is re-read on load — confirm and drop if redundant.
        load_manager = VectorDatabaseLoad(pdf_file_path)
        vector_index, service_context, base_nodes = load_manager.load_VectorDB()
    else:
        # First run: embed the PDF and persist the resulting vector database.
        store_manager = VectorDatabaseStore(pdf_file_path)
        vector_index, service_context, base_nodes = store_manager.store_vectordb()

    # Build the query engine over the index and answer the configured prompt.
    query_engine = QueryEngineManager(vector_index, base_nodes, service_context)
    response = query_engine.query(prompt)
    print(response)