From eefd76fe81cec84cf72b7cba8c7035f0402c5aba Mon Sep 17 00:00:00 2001
From: cebtenzzre
Date: Wed, 1 Nov 2023 18:53:57 -0400
Subject: [PATCH] llama: fix exception in Llama.__del__ (#846)

---
 llama_cpp/llama.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index fb2a541..bc747cf 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -1638,12 +1638,14 @@ class Llama:
         )
         return self._convert_completion_to_chat(completion_or_chunks, stream=stream)  # type: ignore
 
-    def _free_model(self):
-        if hasattr(self, "model") and self.model is not None:
-            llama_cpp.llama_free_model(self.model)
+    def _free_model(self, *, _lfree_model=llama_cpp._lib.llama_free_model, _free=llama_cpp._lib.llama_free):
+        model = getattr(self, 'model', None)
+        if model is not None:
+            _lfree_model(model)
             self.model = None
-        if hasattr(self, "ctx") and self.ctx is not None:
-            llama_cpp.llama_free(self.ctx)
+        ctx = getattr(self, 'ctx', None)
+        if ctx is not None:
+            _free(ctx)
             self.ctx = None
 
     def __del__(self):