From ec26f364cccc28425c8c4d373deb157137682a45 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Mon, 11 Dec 2023 10:25:37 -0500
Subject: [PATCH] Remove f16_kv

---
 llama_cpp/llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 8076548..c696804 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -904,7 +904,7 @@ class Llama:
         )
         self.context_params.yarn_orig_ctx = yarn_orig_ctx if yarn_orig_ctx != 0 else 0
         self.context_params.mul_mat_q = mul_mat_q
-        self.context_params.f16_kv = f16_kv
+        # self.context_params.f16_kv = f16_kv
         self.context_params.logits_all = logits_all
         self.context_params.embedding = embedding