-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy path
13-ollama-chat.py
More file actions
45 lines (37 loc) · 1.5 KB
/
13-ollama-chat.py
File metadata and controls
45 lines (37 loc) · 1.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
"""Chat demo: ask a locally hosted Ollama LLM (via LangChain) a question and
stream the answer to stdout.

Requires a running Ollama server with the `mistral` model pulled.  The
commented-out ChatOpenAI block below is a drop-in replacement that calls the
OpenAI API instead (needs OPENAI_API_KEY in the environment / .env file).
"""
from dotenv import load_dotenv, find_dotenv

# Load variables from a .env file (e.g. OPENAI_API_KEY for the OpenAI variant).
_ = load_dotenv(find_dotenv())

from langchain.chat_models import ChatOllama, ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain, SimpleSequentialChain
from langchain.callbacks.manager import CallbackManager
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

# Persona prompt: a teacher who answers any school-subject question.
# Fixed the typo ("knowleageable") and the contradictory instruction that
# asked for an "English paragraph" while also requiring Indonesian (the
# earlier draft template only required Indonesian).
template = """You are a very knowledgeable and friendly junior high school teacher that can answer any question from all subjects.
The answer is given in a short paragraph and must be in Indonesian language.
Question: {input}
Answer:"""

# Use an LLM running on the local machine via Ollama.
chat_llm = ChatOllama(
    model="mistral",
    # model="llama2:7b-chat",
    temperature=0.0,  # deterministic output
    # `callbacks=[...]` matches the ChatOpenAI variant below; the handler
    # streams each generated token straight to stdout.
    callbacks=[StreamingStdOutCallbackHandler()],
)
# Drop-in alternative using the OpenAI API:
# chat_llm = ChatOpenAI(
#     model="gpt-3.5-turbo",
#     temperature=0.0,
#     streaming=True,
#     callbacks=[StreamingStdOutCallbackHandler()]
# )

prompt = PromptTemplate.from_template(template)

# Wire the prompt and the model into a chain.
chain = LLMChain(
    prompt=prompt,
    llm=chat_llm,
    verbose=True,
)

query = "Jelaskan tentang sejarah bidang ilmu matematika"
print(f"query: {query}")
# The answer is streamed to stdout by the callback handler while generating.
response = chain.predict(input=query)
print()  # trailing newline after the streamed answer