From 5c50af7462440719bc72872a816717261e51360f Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Wed, 5 Apr 2023 03:25:09 -0400
Subject: [PATCH] Remove workaround

---
 llama_cpp/llama.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index e84d457..b7811c6 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -176,11 +176,6 @@ class Llama:
             The sampled token.
         """
         assert self.ctx is not None
-        # Temporary workaround for https://github.com/ggerganov/llama.cpp/issues/684
-        if temp == 0.0:
-            temp = 1.0
-            top_p = 0.0
-            top_k = 1
         return llama_cpp.llama_sample_top_p_top_k(
             ctx=self.ctx,
             last_n_tokens_data=(llama_cpp.llama_token * self.last_n_tokens_size)(
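
For context, the five deleted lines were a client-side shim for https://github.com/ggerganov/llama.cpp/issues/684: a call with temp == 0.0 (conventionally meaning deterministic, greedy decoding) was rewritten into parameters that force greedy behavior before reaching llama_sample_top_p_top_k. The sketch below is a standalone Python restatement of that mapping, not part of the patch; the helper name _greedy_params is invented for illustration.

    def _greedy_params(temp: float, top_p: float, top_k: int):
        # Hypothetical helper reproducing the removed workaround's mapping.
        # llama.cpp's top-p/top-k sampler did not handle temp == 0.0
        # directly at the time, so the binding rewrote the request:
        # top_k=1 keeps only the single most probable token (greedy),
        # which makes the substituted temp and top_p values moot.
        if temp == 0.0:
            return 1.0, 0.0, 1
        return temp, top_p, top_k

    # A greedy request is rewritten; any other request passes through.
    assert _greedy_params(0.0, 0.95, 40) == (1.0, 0.0, 1)
    assert _greedy_params(0.8, 0.95, 40) == (0.8, 0.95, 40)

Removing the shim hands temp == 0.0 straight to llama.cpp, which is only safe once the upstream sampler handles that case itself, as the issue linked in the deleted comment tracked.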