-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathreact_agent_2.py
More file actions
53 lines (43 loc) · 1.45 KB
/
react_agent_2.py
File metadata and controls
53 lines (43 loc) · 1.45 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
from typing import List, Any
from langchain.agents import create_agent
from langchain.tools import tool
from langchain_core.messages import HumanMessage
from langchain_core.prompts import PromptTemplate
from langchain_ollama import ChatOllama
from langchain_tavily import TavilySearch
from pydantic import BaseModel
from pydantic import Field
from tavily import TavilyClient
from dotenv import load_dotenv
from schemas import REACT_PROMPT_TEMPLATE
import os
# Load variables from .env FIRST — the original read TAVILY_API_KEY before
# load_dotenv(), so the key from .env was never visible and None was printed.
load_dotenv()
api_key_check = os.getenv("TAVILY_API_KEY")
# Report presence only; never print the secret itself to stdout/logs.
print("TAVILY_API_KEY set:", api_key_check is not None)
class Source(BaseModel):
    """A single web source cited in an agent answer."""
    url: str = Field(description="the url of the source")
class AgentResponse(BaseModel):
    """Structured agent output: the final answer plus the sources it used."""
    answer: str = Field(description="The agent answer to the query.")
    # default_factory=list keeps the model valid when no sources are returned.
    sources: List[Source] = Field(default_factory=list, description="The list of sources.")
@tool
def search_tool(search_query: str) -> dict[str, Any]:
    """Search the web for ``search_query`` using the Tavily API.

    Args:
        search_query: Free-text query to send to Tavily.

    Returns:
        The raw Tavily response dict (results, answer, etc.).
    """
    # Fix: the original called .search() on the TavilyClient CLASS, which
    # raises a TypeError because search is an instance method. Instantiate
    # the client; it picks up TAVILY_API_KEY from the environment.
    client = TavilyClient()
    # time_range='d' restricts results to the last day (fresh news only).
    search_results = client.search(query=search_query, time_range='d')
    return search_results
def run_llm():
    """Build a ReAct agent on a local Ollama model and run one web-search query."""
    # Local chat model; validate_model_on_init fails fast if the model has
    # not been pulled, and reasoning=True enables the model's thinking mode.
    chat_model = ChatOllama(
        reasoning=True,
        temperature=0.8,
        validate_model_on_init=True,
        model="qwen3:30b-a3b",
    )
    # Give the agent the prebuilt Tavily search tool and force its final
    # output into the AgentResponse schema.
    web_agent = create_agent(
        model=chat_model,
        tools=[TavilySearch()],
        response_format=AgentResponse,
    )
    user_query = HumanMessage(content="What are the latest AI trends?")
    result = web_agent.invoke({"messages": user_query})
    print(result)
# Script entry point: run a single agent query when executed directly.
if __name__ == '__main__':
    run_llm()