Free model when llama is unloaded. Closes #434
This commit is contained in:
parent
442213b070
commit
a5e059c053
1 changed file with 3 additions and 0 deletions
|
@ -1437,6 +1437,9 @@ class Llama:
|
||||||
return self._convert_text_completion_to_chat(completion)
|
return self._convert_text_completion_to_chat(completion)
|
||||||
|
|
||||||
def __del__(self):
    """Release the native llama.cpp resources held by this instance.

    Called when the object is garbage-collected. Frees the llama context
    and the loaded model through the llama.cpp C API and nulls the handles
    so a double free cannot occur if ``__del__`` runs more than once.

    Notes:
        - ``getattr(..., None)`` is used instead of direct attribute access
          so that a partially-constructed instance (``__init__`` raised
          before these attributes were assigned) does not raise
          ``AttributeError`` during interpreter teardown.
        - The context is freed *before* the model: a llama context keeps a
          reference to the model it was created from, so the model must
          outlive every context that uses it.
    """
    # Free the context first — it depends on the model being alive.
    if getattr(self, "ctx", None) is not None:
        llama_cpp.llama_free(self.ctx)
        self.ctx = None
    # Then release the model weights themselves.
    if getattr(self, "model", None) is not None:
        llama_cpp.llama_free_model(self.model)
        self.model = None
|
|
Loading…
Reference in a new issue