From fd9f294b3a5194844f50d3b77cc71c51b8ffbb52 Mon Sep 17 00:00:00 2001
From: imaprogrammer <46126206+nb-programmer@users.noreply.github.com>
Date: Fri, 16 Jun 2023 14:11:57 +0530
Subject: [PATCH] Update llama.py: Added how many input tokens in ValueError
 exception

---
 llama_cpp/llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index a6f1e76..366f050 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -814,7 +814,7 @@ class Llama:
         llama_cpp.llama_reset_timings(self.ctx)
 
         if len(prompt_tokens) > self._n_ctx:
-            raise ValueError(f"Requested tokens exceed context window of {self._n_ctx}")
+            raise ValueError(f"Requested tokens ({len(prompt_tokens)}) exceed context window of {self._n_ctx}")
 
         # Truncate max_tokens if requested tokens would exceed the context window
         max_tokens = (
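
Note (not part of the patch): a minimal sketch of how the improved message
would surface to a llama-cpp-python caller. The model path and n_ctx value
below are placeholder assumptions; the exact token count printed depends on
the model's tokenizer.

    from llama_cpp import Llama

    # Placeholder model path; deliberately small context window.
    llm = Llama(model_path="./models/7B/ggml-model.bin", n_ctx=512)

    try:
        # A prompt that tokenizes to more than n_ctx tokens trips the check
        # in Llama's completion path.
        llm("word " * 1000)
    except ValueError as e:
        # Before the patch: "Requested tokens exceed context window of 512"
        # After the patch:  "Requested tokens (<actual count>) exceed context window of 512"
        print(e)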