misc fix verbose printing in functionary model

This commit is contained in:
Andrei Betlen 2023-11-23 20:14:23 -05:00
parent 36048d46af
commit de2e2bc083

View file

@@ -955,9 +955,11 @@ def functionary_chat_handler(
     assert isinstance(function_call, str)
     assert stream is False  # TODO: support stream mode
+    if llama.verbose:
-    print(new_prompt)
-    print(completion["choices"][0]["text"])
+        print(new_prompt)
+        print(completion["choices"][0]["text"])
+    # TODO: support stream mode
     return llama_types.CreateChatCompletionResponse(
         id="chat" + completion["id"],
         object="chat.completion",