Added tokenizer
This commit is contained in:
parent
8fa2ef1959
commit
5be8354e11
1 changed file with 4 additions and 2 deletions
|
@ -1416,8 +1416,10 @@ class LlamaTokenizer:
|
||||||
def __init__(self, llama: Llama):
    """Build a tokenizer facade around an existing *llama* model instance."""
    # All encode/decode work is delegated to this wrapped model.
    self.llama = llama
|
||||||
|
|
||||||
def encode(self, text: str, add_bos: bool = True) -> List[int]:
    """Tokenize *text* into a list of token ids.

    Undecodable characters are silently dropped (``errors="ignore"``);
    ``add_bos`` controls whether a beginning-of-sequence token is prepended,
    as interpreted by the underlying model's ``tokenize``.
    """
    raw = text.encode("utf-8", errors="ignore")
    return self.llama.tokenize(raw, add_bos=add_bos)
|
||||||
|
|
||||||
def decode(self, tokens: List[int]) -> str:
    """Render token ids back into a string.

    Bytes produced by the model that are not valid UTF-8 are silently
    discarded (``errors="ignore"``).
    """
    data = self.llama.detokenize(tokens)
    return data.decode("utf-8", errors="ignore")
|
||||||
|
|
Loading…
Reference in a new issue