fix: segfault for models without eos / bos tokens. Closes #1463
parent e811a81066
commit d99a6ba607
1 changed file with 2 additions and 2 deletions
@@ -413,8 +413,8 @@ class Llama:
         eos_token_id = self.token_eos()
         bos_token_id = self.token_bos()

-        eos_token = self._model.token_get_text(eos_token_id)
-        bos_token = self._model.token_get_text(bos_token_id)
+        eos_token = self._model.token_get_text(eos_token_id) if eos_token_id != -1 else ""
+        bos_token = self._model.token_get_text(bos_token_id) if bos_token_id != -1 else ""

         # Unfortunately the llama.cpp API does not return metadata arrays, so we can't get template names from tokenizer.chat_templates
         template_choices = dict((name[10:], template) for name, template in self.metadata.items() if name.startswith("tokenizer.chat_template."))
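
For context on why the guard is needed: llama.cpp reports a missing eos/bos token as id -1, and feeding -1 into the native token-text lookup indexes out of range in C code, which can crash the process rather than raise a Python exception. Below is a minimal sketch of the pattern; _FakeModel and safe_token_text are illustrative names invented for this example, not part of llama-cpp-python.

class _FakeModel:
    # Hypothetical stand-in for the internal model wrapper. The real
    # lookup happens in C, where an id of -1 reads out of bounds and
    # can segfault instead of raising a tidy KeyError like this dict.
    def __init__(self, vocab):
        self._vocab = vocab

    def token_get_text(self, token_id: int) -> str:
        return self._vocab[token_id]


def safe_token_text(model, token_id: int) -> str:
    # The fix in the diff above: treat -1 ("token not defined") as an
    # empty string instead of asking the model to resolve it.
    return model.token_get_text(token_id) if token_id != -1 else ""


model = _FakeModel({0: "<s>", 1: "</s>"})
assert safe_token_text(model, 1) == "</s>"
assert safe_token_text(model, -1) == ""  # no lookup, no crash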