Andrei Betlen 2023-05-26 03:00:35 -04:00
parent 6bd1075291
commit 8fa2ef1959


@@ -696,9 +696,7 @@ class Llama:
             llama_cpp.llama_reset_timings(self.ctx)
 
         if len(prompt_tokens) + max_tokens > self._n_ctx:
-            raise ValueError(
-                f"Requested tokens exceed context window of {self._n_ctx}"
-            )
+            raise ValueError(f"Requested tokens exceed context window of {self._n_ctx}")
 
         if stop != []:
             stop_sequences = [s.encode("utf-8") for s in stop]
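
For context, callers of the high-level API hit this check when the tokenized prompt plus max_tokens exceeds n_ctx. A minimal sketch of triggering and handling the error, assuming the llama-cpp-python Llama constructor and __call__ interface of this era (the model path and n_ctx value below are placeholders, not from this commit):

    from llama_cpp import Llama

    # Placeholder model path; n_ctx sets the context window enforced by the check above.
    llm = Llama(model_path="./models/ggml-model.bin", n_ctx=512)

    try:
        # Tokenized prompt length + max_tokens (1024) exceeds n_ctx (512),
        # so the call raises the ValueError reformatted in this diff.
        llm("Q: Name the planets in the solar system. A: ", max_tokens=1024)
    except ValueError as e:
        print(f"Context overflow: {e}")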