From de2e2bc08328ef1292fa89a8fffc7e5e6d71f797 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Thu, 23 Nov 2023 20:14:23 -0500
Subject: [PATCH] misc fix verbose printing in functionary model

---
 llama_cpp/llama_chat_format.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index 7248516..c87b24a 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -955,9 +955,11 @@ def functionary_chat_handler(
     assert isinstance(function_call, str)
     assert stream is False  # TODO: support stream mode
 
-    print(new_prompt)
-    print(completion["choices"][0]["text"])
+    if llama.verbose:
+        print(new_prompt)
+        print(completion["choices"][0]["text"])
+
+    # TODO: support stream mode
     return llama_types.CreateChatCompletionResponse(
         id="chat" + completion["id"],
         object="chat.completion",
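
Note: the sketch below illustrates the pattern this patch applies, replacing
unconditional debug prints with prints gated on the model's verbose flag.
It is a minimal, self-contained illustration; the Llama stand-in class and
chat_handler function here are hypothetical, not the library's actual API.

# Hypothetical sketch of the verbose-gated printing pattern from the patch
# above; not the real llama_cpp.Llama class or functionary handler.
class Llama:
    """Stand-in for llama_cpp.Llama; only the `verbose` flag matters here."""
    def __init__(self, verbose: bool = False):
        self.verbose = verbose

def chat_handler(llama: Llama, prompt: str, completion_text: str) -> str:
    # Gate debug output on the caller's verbose setting, mirroring the
    # `if llama.verbose:` guard added in the patch.
    if llama.verbose:
        print(prompt)
        print(completion_text)
    return completion_text

# Usage: verbose=True echoes the prompt and the raw completion text; the
# default (verbose=False) keeps the handler silent.
chat_handler(Llama(verbose=True), "Hello", "Hi there!")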