ollama_tools.py
import ollama
import random
import asyncio

MODEL = "llama3.2"

SYS_PROMPT = """
You are a friendly chatbot answering the user's questions.
"""
# Tool schema advertised to the model: a single getWeather function
# that takes the city name as its only required argument.
tools = [
    {
        "type": "function",
        "function": {
            "name": "getWeather",
            "description": "Get weather information about a city",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {
                        "type": "string",
                        "description": "Name of the city",
                    }
                },
                "required": ["city"],
            },
        },
    }
]


def handle_tool_call(tool_call):
    """Dispatch a tool call requested by the model and return the result as text."""
    tool_name = tool_call["function"]["name"]
    tool_args = tool_call["function"]["arguments"]
    if tool_name == "getWeather":
        # Fake weather data: the point is the tool-calling flow, not a real API.
        temperature = random.randint(0, 20)
        weather_type = random.choice(["sunny", "cloudy", "raining"])
        city = tool_args["city"]
        return f"The weather in {city} today: {temperature}°, {weather_type}"
    else:
        raise ValueError(f"Unknown tool {tool_name}")


async def run():
    client = ollama.AsyncClient()
    user_msg = "What's the weather like in Helsinki?"
    messages = [
        {"role": "system", "content": SYS_PROMPT},
        {"role": "user", "content": user_msg},
    ]

    # First request: let the model decide whether it wants to call the tool.
    response = await client.chat(model=MODEL, messages=messages, tools=tools)
    messages.append(response["message"])

    # If the model requested tool calls, execute them and append the results
    # as "tool" messages so the model can use them in its final answer.
    if response["message"].get("tool_calls"):
        for tool_call in response["message"]["tool_calls"]:
            tool_result = handle_tool_call(tool_call)
            messages.append({"role": "tool", "content": tool_result})

    # Second request: stream the final answer that incorporates the tool results.
    stream = await client.chat(
        model=MODEL, messages=messages, tools=tools, stream=True
    )
    async for chunk in stream:
        print(chunk["message"]["content"], end="", flush=True)


asyncio.run(run())
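
To try the script, run it directly with Python, assuming the `ollama` package is installed, a local Ollama server is running, and the `llama3.2` model has already been pulled (`ollama pull llama3.2`). It asks about the weather in Helsinki, lets the model call `getWeather`, and streams the final answer to stdout.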