misc: Remove unnecessary metadata lookups (#1448)
Special tokens are already mapped from metadata by llama.cpp
parent 4b54f79330
commit 389e09c2f5

1 changed file with 5 additions and 5 deletions
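For context, a minimal sketch (not part of the commit; it assumes the llama-cpp-python high-level Llama API, and the model path is hypothetical) of reading the special token IDs directly, now that llama.cpp has already mapped them from the GGUF metadata at load time:

# Minimal sketch: query the special token IDs from the loaded model
# instead of re-reading self.metadata in the Python wrapper.
from llama_cpp import Llama

llm = Llama(model_path="./model.gguf", verbose=False)  # hypothetical path

eos_token_id = llm.token_eos()  # ID llama.cpp resolved from tokenizer.ggml.eos_token_id
bos_token_id = llm.token_bos()  # ID llama.cpp resolved from tokenizer.ggml.bos_token_id
print(eos_token_id, bos_token_id)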
@@ -410,8 +410,8 @@ class Llama:
         if self.verbose:
             print(f"Model metadata: {self.metadata}", file=sys.stderr)
 
-        eos_token_id = int(self.metadata.get("tokenizer.ggml.eos_token_id", self.token_eos()))
-        bos_token_id = int(self.metadata.get("tokenizer.ggml.bos_token_id", self.token_bos()))
+        eos_token_id = self.token_eos()
+        bos_token_id = self.token_bos()
 
         eos_token = self._model.token_get_text(eos_token_id)
         bos_token = self._model.token_get_text(bos_token_id)
@@ -961,9 +961,9 @@ class Llama:
 
         completion_id: str = f"cmpl-{str(uuid.uuid4())}"
         created: int = int(time.time())
-        prefix_token_id: int = int(self.metadata.get("tokenizer.ggml.prefix_token_id", self._model.token_prefix()))
-        middle_token_id: int = int(self.metadata.get("tokenizer.ggml.middle_token_id", self._model.token_middle()))
-        suffix_token_id: int = int(self.metadata.get("tokenizer.ggml.suffix_token_id", self._model.token_suffix()))
+        prefix_token_id: int = self._model.token_prefix()
+        middle_token_id: int = self._model.token_middle()
+        suffix_token_id: int = self._model.token_suffix()
         # If prompt is empty, initialize completion with BOS token to avoid
         # detokenization including a space at the beginning of the completion
         completion_tokens: List[int] = [] if len(prompt) > 0 else [self.token_bos()]