From c1325dcdfba7cb331b4c09d110001658b94ebb9f Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Fri, 22 Mar 2024 23:44:04 -0400
Subject: [PATCH] fix: tool_call missing first token.

---
 llama_cpp/llama_chat_format.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index 5bda163..ccf4fd0 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -402,7 +402,7 @@ def _convert_completion_to_chat_function(
                             "type": "function",
                             "function": {
                                 "name": tool_name,
-                                "arguments": "",
+                                "arguments": chunk["choices"][0]["text"],
                             },
                         }
                     ],
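
Note: the sketch below is not part of the patch; it is a minimal, self-contained illustration of the bug the subject line describes. When tool-call arguments are streamed, the first completion chunk was previously emitted with "arguments": "", so the first token of the arguments JSON never reached the client. The chunk dicts and the stream_tool_arguments helper are hypothetical stand-ins, not llama_cpp APIs.

# Minimal sketch (assumed chunk shape: {"choices": [{"text": ...}]}).
def stream_tool_arguments(chunks, include_first_text):
    """Reassemble streamed tool-call arguments from completion chunks."""
    parts = []
    # Before the fix: the first delta carried arguments == "" and its text was lost.
    # After the fix: the first delta carries chunk["choices"][0]["text"].
    parts.append(chunks[0]["choices"][0]["text"] if include_first_text else "")
    for chunk in chunks[1:]:
        parts.append(chunk["choices"][0]["text"])
    return "".join(parts)

chunks = [
    {"choices": [{"text": '{"'}]},
    {"choices": [{"text": 'location": "Paris"}'}]},
]

print(stream_tool_arguments(chunks, include_first_text=False))  # 'location": "Paris"}'  (broken JSON)
print(stream_tool_arguments(chunks, include_first_text=True))   # '{"location": "Paris"}'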