feat: Use new llama_token_is_eog in create_completions
parent b21ba0e2ac
commit d40a250ef3
1 changed file with 2 additions and 1 deletion
@@ -1034,7 +1034,8 @@ class Llama:
             logits_processor=logits_processor,
             grammar=grammar,
         ):
-            if token == self._token_eos:
+            assert self._model.model is not None
+            if llama_cpp.llama_token_is_eog(self._model.model, token):
                 text = self.detokenize(completion_tokens, prev_tokens=prompt_tokens)
                 finish_reason = "stop"
                 break
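
Context (not part of the commit): `llama_cpp.llama_token_is_eog()` reports whether a token is *any* end-of-generation token the model defines (EOS, end-of-turn, etc.), whereas the old `token == self._token_eos` comparison matched only the single EOS id, which can miss stop tokens like Llama 3's `<|eot_id|>`. A minimal sketch of the difference, assuming llama-cpp-python is installed and `"model.gguf"` is a hypothetical local model path:

```python
# Sketch only: contrasts the old single-id check with the new EOG check.
import llama_cpp
from llama_cpp import Llama

llm = Llama(model_path="model.gguf", verbose=False)  # hypothetical path
assert llm._model.model is not None  # same guard the patch adds

eos = llm.token_eos()                              # the one id the old check compared against
eot = llama_cpp.llama_token_eot(llm._model.model)  # end-of-turn id; -1 if the model has none

for tok in (eos, eot):
    old = tok == eos                                           # old: single-id comparison
    new = llama_cpp.llama_token_is_eog(llm._model.model, tok)  # new: any end-of-generation token
    print(f"token {tok}: old check = {old}, llama_token_is_eog = {new}")
```

For a model whose chat template stops with an end-of-turn token distinct from EOS, the old check prints `False` where the new one prints `True`, which is why generation previously could run past the intended stop point.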