misc fix verbose printing in functionary model
parent 36048d46af
commit de2e2bc083
1 changed file with 4 additions and 2 deletions
@@ -955,9 +955,11 @@ def functionary_chat_handler(
     assert isinstance(function_call, str)
     assert stream is False  # TODO: support stream mode

-    print(new_prompt)
-    print(completion["choices"][0]["text"])
+    if llama.verbose:
+        print(new_prompt)
+        print(completion["choices"][0]["text"])
+
     # TODO: support stream mode
     return llama_types.CreateChatCompletionResponse(
         id="chat" + completion["id"],
         object="chat.completion",
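In effect, the change gates the handler's two debug prints behind llama.verbose, so the rendered prompt and the raw completion text are echoed only when the model was created in verbose mode. A minimal sketch of the resulting behavior, using a hypothetical SimpleNamespace stand-in for the real llama_cpp.Llama object and a fabricated completion dict:

    from types import SimpleNamespace

    # Hypothetical stand-ins: the real handler receives a llama_cpp.Llama
    # instance and the completion dict returned by the model.
    llama = SimpleNamespace(verbose=True)
    new_prompt = "user: Hello\nassistant:"
    completion = {"id": "cmpl-123", "choices": [{"text": " Hi there!"}]}

    # The commit's pattern: debug output is gated on the verbose flag
    # rather than printed unconditionally.
    if llama.verbose:
        print(new_prompt)
        print(completion["choices"][0]["text"])

In llama-cpp-python the flag comes from the verbose argument accepted by the Llama constructor, so models constructed with verbose=False no longer emit these debug prints.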