llama: fix exception in Llama.__del__ (#846)
parent 3fc9147218
commit eefd76fe81
1 changed file with 7 additions and 5 deletions
@@ -1638,12 +1638,14 @@ class Llama:
         )
         return self._convert_completion_to_chat(completion_or_chunks, stream=stream)  # type: ignore
 
-    def _free_model(self):
-        if hasattr(self, "model") and self.model is not None:
-            llama_cpp.llama_free_model(self.model)
+    def _free_model(self, *, _lfree_model=llama_cpp._lib.llama_free_model, _free=llama_cpp._lib.llama_free):
+        model = getattr(self, 'model', None)
+        if model is not None:
+            _lfree_model(model)
             self.model = None
-        if hasattr(self, "ctx") and self.ctx is not None:
-            llama_cpp.llama_free(self.ctx)
+        ctx = getattr(self, 'ctx', None)
+        if ctx is not None:
+            _free(ctx)
             self.ctx = None
 
     def __del__(self):
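The change combines two common finalizer idioms. Binding the low-level free functions as keyword-only default arguments captures them when the method is defined, so they stay callable even if __del__ runs during interpreter shutdown, after module globals such as llama_cpp may already have been cleared. Replacing hasattr plus direct attribute access with getattr(self, ..., None) keeps cleanup from raising if __init__ failed before the attribute was ever set. A minimal sketch of the same pattern follows; the names Resource and fake_free are hypothetical stand-ins, whereas the actual commit binds llama_cpp._lib.llama_free_model and llama_cpp._lib.llama_free.

def fake_free(handle):
    # Stand-in for a C-library free function (hypothetical).
    print(f"freed {handle!r}")

class Resource:
    def __init__(self, handle):
        self.handle = handle

    # The keyword-only default captures fake_free at definition time, so it is
    # still reachable when __del__ fires during interpreter shutdown, after
    # module globals may have been set to None.
    def _free_resources(self, *, _free=fake_free):
        # getattr with a default instead of hasattr + attribute access: if
        # __init__ raised before setting self.handle, cleanup must not raise.
        handle = getattr(self, "handle", None)
        if handle is not None:
            _free(handle)
            self.handle = None

    def __del__(self):
        self._free_resources()

r = Resource("model-ptr")
del r  # prints: freed 'model-ptr'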