Merge pull request #385 from nb-programmer/main

Update llama.py: Include the number of input tokens in the ValueError exception message
This commit is contained in:
Andrei 2023-06-16 23:12:39 -04:00 committed by GitHub
commit ff9faaa48b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -814,7 +814,7 @@ class Llama:
llama_cpp.llama_reset_timings(self.ctx)
if len(prompt_tokens) > self._n_ctx:
-            raise ValueError(f"Requested tokens exceed context window of {self._n_ctx}")
+            raise ValueError(f"Requested tokens ({len(prompt_tokens)}) exceed context window of {self._n_ctx}")
# Truncate max_tokens if requested tokens would exceed the context window
max_tokens = (