Commit

Update test_tool_call.py
ochafik committed Feb 11, 2025
1 parent 4dc8643 commit 409b0bf
Showing 1 changed file with 3 additions and 3 deletions.
examples/server/tests/unit/test_tool_call.py (6 changes: 3 additions & 3 deletions)
@@ -384,7 +384,7 @@ def do_test_calc_result(result_override: str | None, n_predict: int, **kwargs):
    res = server.make_request("POST", "/v1/chat/completions", data={
        "max_tokens": n_predict,
        "messages": [
-            {"role": "system", "content": "You are a chatbot that uses tools/functions. Dont overthink things, and provide very concise answers. Do not explain your reasoning to the user. Provide any numerical values back to the user with at most two decimals."},
+            {"role": "system", "content": "You are a tools-calling assistant. You express numerical values with at most two decimals."},
            {"role": "user", "content": "What's the y coordinate of a point on the unit sphere at angle 30 degrees?"},
            {
                "role": "assistant",
@@ -630,8 +630,8 @@ def stop():
    export LLAMA_CACHE=$HOME/Library/Caches/llama.cpp ;
    export LLAMA_SERVER_BIN_PATH=$PWD/build/bin/llama-server ;
    export ARGS=( --n=10 --temps=0,0.5,0.75,1,1.5,2,5, --append=all.jsonl ) ;
-    ./examples/server/tests/unit/test_tool_call.py ${ARGS[@]} --model "Qwen 2.5 1.5B Q4_K_M" --hf bartowski/Qwen2.5-1.5B-Instruct-GGUF --ollama qwen2.5:1.5b-instruct-q4_K_M ;
-    ./examples/server/tests/unit/test_tool_call.py ${ARGS[@]} --model "Qwen 2.5 Coder 7B Q4_K_M" --hf bartowski/Qwen2.5-Coder-7B-Instruct-GGUF --ollama qwen2.5-coder:7b ;
+    ./examples/server/tests/unit/test_tool_call.py ${ARGS[@]} --model "Qwen 2.5 1.5B Q4_K_M" --hf bartowski/Qwen2.5-1.5B-Instruct-GGUF --ollama qwen2.5:1.5b-instruct-q4_K_M ;
+    ./examples/server/tests/unit/test_tool_call.py ${ARGS[@]} --model "Qwen 2.5 7B Q4_K_M" --hf bartowski/Qwen2.5-7B-Instruct-GGUF ;
    ./examples/server/tests/unit/test_tool_call.py ${ARGS[@]} --model "Llama 3.2 Instruct 1B Q4_K_M" --hf bartowski/Llama-3.2-1B-Instruct-GGUF --ollama llama3.2:1b-instruct-q4_K_M ;
    ./examples/server/tests/unit/test_tool_call.py ${ARGS[@]} --model "Llama 3.2 Instruct 3B Q4_K_M" --hf bartowski/Llama-3.2-3B-Instruct-GGUF --ollama llama3.1:3b ;
@@ -695,7 +695,7 @@ def run(*, implementation: str, model_id: str, temp: float | None = None, output
    failures = []
    success_times = []
    failure_times = []
-    print(f"Running {test_name}: ", file=sys.stderr, flush=True)
+    print(f"Running {test_name} ({implementation}, {args.model}): ", file=sys.stderr, flush=True)
    for i in range(n):
        start_time = time.time()
        def elapsed():
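
For illustration only (not part of the commit): a minimal, self-contained Python sketch of the progress line the updated print call in run() produces. The values of test_name, implementation, and args.model below are made up; in the test script they come from the benchmark loop and its argument parser.

import sys
from types import SimpleNamespace

# Illustrative stand-ins for values the script supplies at runtime.
test_name = "hello_world"
implementation = "llama-server"
args = SimpleNamespace(model="Qwen 2.5 1.5B Q4_K_M")

# Same shape as the updated call: the progress header now names the
# implementation and model under test, not just the test name.
print(f"Running {test_name} ({implementation}, {args.model}): ", file=sys.stderr, flush=True)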
