-
Notifications
You must be signed in to change notification settings - Fork 1k
Expand file tree
/
Copy pathtools-decorators.py
More file actions
89 lines (78 loc) · 2.81 KB
/
tools-decorators.py
File metadata and controls
89 lines (78 loc) · 2.81 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import asyncio
from ollama import ChatResponse, chat
from ollama import (
ollama_tool,
ollama_async_tool,
get_ollama_tools,
get_ollama_name_async_tools,
get_ollama_tools_name,
get_ollama_tool_description)
@ollama_tool
def add_two_numbers(a: int, b: int) -> int:
    """
    Return the sum of two integers.

    Args:
        a (int): The first addend
        b (int): The second addend

    Returns:
        int: The sum a + b
    """
    total = a + b
    return total
@ollama_tool
def subtract_two_numbers(a: int, b: int) -> int:
    """
    Return the difference of two integers.

    Args:
        a (int): The minuend
        b (int): The subtrahend

    Returns:
        int: The difference a - b
    """
    difference = a - b
    return difference
@ollama_async_tool
async def web_search(query: str) -> str:
    """
    Perform a (mock) web search for the given query.

    Args:
        query (str): The text to search the web for

    Returns:
        str: A message describing the search performed
    """
    result = f"Searching the web for {query}"
    return result
# Registry of ALL decorated tools: maps tool name -> callable (built by the decorators above).
available_functions = get_ollama_tools_name()
# Names of the async tools only — they must be dispatched via asyncio.run(), not called directly.
async_available_functions = get_ollama_name_async_tools()
messages = [
    # get_ollama_tool_description() renders the registered tools' docs into the system prompt.
    {'role': 'system', 'content': f'You are a helpful assistant, with access to these tools: {get_ollama_tool_description()}'},
    {'role': 'user', 'content': 'What is three plus one? and Search the web for what is ollama'}]
print('Prompt:', messages[1]['content'])
# First round-trip: the model may answer directly or request tool calls.
response: ChatResponse = chat(
    'llama3.1',
    messages=messages,
    tools=get_ollama_tools(),  # tool schemas collected from the decorated functions
)
if response.message.tool_calls:
    # Record the assistant turn that requested the tool calls, then append one
    # 'tool' message per call so the model sees EVERY result (the original code
    # appended only the last call's output after the loop, dropping earlier
    # results and raising NameError on `output` if the last tool was unknown).
    messages.append(response.message)
    # There may be multiple tool calls in the response.
    for tool in response.message.tool_calls:
        # Ensure the function is registered before calling it.
        if function_to_call := available_functions.get(tool.function.name):
            print('Calling function:', tool.function.name)
            print('Arguments:', tool.function.arguments)
            # Async tools need an event loop; sync tools are called directly.
            if tool.function.name in async_available_functions:
                output = asyncio.run(function_to_call(**tool.function.arguments))
            else:
                output = function_to_call(**tool.function.arguments)
            print('Function output:', output)
            # Feed this call's result back for the model to use.
            messages.append({'role': 'tool', 'content': str(output), 'name': tool.function.name})
        else:
            print('Function', tool.function.name, 'not found')
    # Get the final response from the model with all function outputs applied.
    final_response = chat('llama3.1', messages=messages)
    print('Final response:', final_response.message.content)
else:
    print('No tool calls returned from model')