Fix issue of missing words due to buffer overflow
parent 9ab49bc1d4
commit 4100bdec31
1 changed file with 1 addition and 1 deletion
@@ -445,7 +445,7 @@ class Llama:
         """
         assert self.model is not None
         output = b""
-        size = 8
+        size = 16
         buffer = (ctypes.c_char * size)()
         for token in tokens:
             n = llama_cpp.llama_token_to_str_with_model(
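For context, the snippet below is a minimal, self-contained sketch of the detokenize loop this hunk touches, illustrating why an 8-byte buffer loses text: some tokens render to more than 8 bytes of UTF-8, so a fixed-size buffer truncates them and words go missing from the decoded output. The exact signature of llama_cpp.llama_token_to_str_with_model (argument order and return value) is assumed here, not taken from the diff; check llama_cpp.py in the repository for the real binding.

import ctypes

import llama_cpp  # low-level bindings from llama-cpp-python


def detokenize_sketch(model, tokens, size=16):
    # Render each token into a fixed-size ctypes buffer and concatenate
    # the results. With size=8, a token whose text exceeds 8 bytes is cut
    # off (the "missing words" symptom); size=16 leaves headroom.
    #
    # Assumed signature (not verified against the pinned version):
    #   llama_token_to_str_with_model(model, token, buf, length) -> int
    # returning the number of bytes written into buf.
    output = b""
    buffer = (ctypes.c_char * size)()
    for token in tokens:
        n = llama_cpp.llama_token_to_str_with_model(
            model, llama_cpp.llama_token(token), buffer, size
        )
        output += bytes(buffer[:n])
    return output

A fixed-size buffer keeps the loop simple and allocation-free; the trade-off is that the size must cover the longest token text, which is why bumping it from 8 to 16 bytes resolves the truncation.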