-
Notifications
You must be signed in to change notification settings - Fork 541
Expand file tree
/
Copy pathcustom_tool_call.py
More file actions
71 lines (63 loc) · 1.75 KB
/
custom_tool_call.py
File metadata and controls
71 lines (63 loc) · 1.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import json
import os
from mistralrs import (
Runner,
Which,
ChatCompletionRequest,
Architecture,
ToolChoice,
)
def local_search(query: str):
    """Search the current directory tree for files whose name contains *query*.

    Walks the filesystem from ``"."`` and collects every file whose filename
    contains *query* as a substring.  Each match's text content is read
    best-effort (empty string on any read/decode failure).

    Args:
        query: Substring to look for in file names.

    Returns:
        A list of dicts with keys ``"title"`` (filename), ``"description"``
        and ``"url"`` (both the relative path), and ``"content"`` (file text),
        sorted by title in reverse lexicographic order.
    """
    results = []
    for root, _, files in os.walk("."):
        for f in files:
            if query in f:
                path = os.path.join(root, f)
                try:
                    # Context manager closes the handle promptly; the
                    # original `open(path).read()` leaked file descriptors.
                    with open(path) as fh:
                        content = fh.read()
                except Exception:
                    # Best-effort: unreadable or binary files yield "".
                    content = ""
                results.append(
                    {
                        "title": f,
                        "description": path,
                        "url": path,
                        "content": content,
                    }
                )
    results.sort(key=lambda r: r["title"], reverse=True)
    return results
def tool_cb(name: str, args: dict) -> str:
    """Dispatch a model-issued tool call to its local implementation.

    Args:
        name: Tool name requested by the model.
        args: Parsed JSON arguments for the call.

    Returns:
        The tool's result serialized as JSON, or ``""`` for unknown tools.
    """
    if name != "local_search":
        return ""
    query = args.get("query", "")
    return json.dumps(local_search(query))
# JSON schema advertising the `local_search` tool to the model.
# Extracted the parameter spec into a named local for readability.
_search_params = {
    "type": "object",
    "properties": {"query": {"type": "string"}},
    "required": ["query"],
}
schema = json.dumps(
    {
        "type": "function",
        "function": {
            "name": "local_search",
            "description": "Local filesystem search",
            "parameters": _search_params,
        },
    }
)
# Load the model and register the Python callback so the runtime can
# execute `local_search` tool calls emitted by the model.
_model_cfg = Which.Plain(
    model_id="NousResearch/Hermes-3-Llama-3.1-8B",
    arch=Architecture.Llama,
)
runner = Runner(which=_model_cfg, tool_callbacks={"local_search": tool_cb})
# Ask the model a question; ToolChoice.Auto lets it decide whether to
# invoke `local_search` before answering.
_request = ChatCompletionRequest(
    model="default",
    messages=[{"role": "user", "content": "Where is Cargo.toml in this repo?"}],
    max_tokens=64,
    tool_schemas=[schema],
    tool_choice=ToolChoice.Auto,
)
res = runner.send_chat_completion_request(_request)
print(res.choices[0].message.content)