From a5e059c05371d29e5388b6f81a52a4a3f9209479 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Wed, 28 Jun 2023 23:58:55 -0400
Subject: [PATCH] Free model when llama is unloaded. Closes #434

---
 llama_cpp/llama.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index e030b49..2865d27 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -1437,6 +1437,9 @@ class Llama:
         return self._convert_text_completion_to_chat(completion)
 
     def __del__(self):
+        if self.model is not None:
+            llama_cpp.llama_free_model(self.model)
+            self.model = None
         if self.ctx is not None:
             llama_cpp.llama_free(self.ctx)
             self.ctx = None