From 4100bdec31b3d8fc2b545e78965f3d6b679ec3a4 Mon Sep 17 00:00:00 2001
From: manasmagdum <37959958+manasmagdum@users.noreply.github.com>
Date: Sun, 27 Aug 2023 16:39:46 +0530
Subject: [PATCH] Fix issue of missing words due to buffer overflow

---
 llama_cpp/llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 49c98fd..40b1837 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -445,7 +445,7 @@ class Llama:
         """
         assert self.model is not None
         output = b""
-        size = 8
+        size = 16
         buffer = (ctypes.c_char * size)()
         for token in tokens:
             n = llama_cpp.llama_token_to_str_with_model(
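
Note (not part of the patch): the change doubles the fixed-size ctypes buffer used in Llama.detokenize from 8 to 16 bytes, so token pieces whose byte representation exceeds 8 bytes are no longer silently clipped, which is what produced the missing words. Below is a minimal, self-contained Python sketch of the failure mode under stated assumptions: fake_token_to_str is a hypothetical stand-in for llama_cpp.llama_token_to_str_with_model, not the library's actual API.

import ctypes

# Hypothetical stand-in (assumption, not the real binding): writes the token's
# UTF-8 bytes into `buffer` and returns the number of bytes written, truncating
# to `length` exactly as a too-small C buffer would.
def fake_token_to_str(token, buffer, length):
    piece = {0: b" hello", 1: b" world", 2: " Schloßstraße".encode("utf-8")}[token]
    n = min(len(piece), length)
    ctypes.memmove(buffer, piece, n)
    return n

# Mirrors the buffer handling visible in the diff context: a fixed-size ctypes
# buffer is reused for every token, and only the first `n` bytes are appended.
def detokenize(tokens, size):
    output = b""
    buffer = (ctypes.c_char * size)()
    for token in tokens:
        n = fake_token_to_str(token, buffer, size)
        output += bytes(buffer[:n])
    return output

print(detokenize([0, 1, 2], size=8))   # token 2 is cut short: b' hello world Schlo\xc3\x9f'
print(detokenize([0, 1, 2], size=16))  # full text survives: b' hello world Schlo\xc3\x9fstra\xc3\x9fe'

With size = 8, any single token that decodes to more than 8 bytes (common for multi-byte UTF-8 sequences and longer subwords) is clipped; raising the cap to 16 bytes avoids that truncation for such tokens.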