llama: fix exception in Llama.__del__ (#846)
This commit is contained in:
parent
3fc9147218
commit
eefd76fe81
1 changed file with 7 additions and 5 deletions
|
@ -1638,12 +1638,14 @@ class Llama:
|
|||
)
|
||||
return self._convert_completion_to_chat(completion_or_chunks, stream=stream) # type: ignore
|
||||
|
||||
def _free_model(self):
|
||||
if hasattr(self, "model") and self.model is not None:
|
||||
llama_cpp.llama_free_model(self.model)
|
||||
def _free_model(self, *, _lfree_model=llama_cpp._lib.llama_free_model, _free=llama_cpp._lib.llama_free):
|
||||
model = getattr(self, 'model', None)
|
||||
if model is not None:
|
||||
_lfree_model(model)
|
||||
self.model = None
|
||||
if hasattr(self, "ctx") and self.ctx is not None:
|
||||
llama_cpp.llama_free(self.ctx)
|
||||
ctx = getattr(self, 'ctx', None)
|
||||
if ctx is not None:
|
||||
_free(ctx)
|
||||
self.ctx = None
|
||||
|
||||
def __del__(self):
|
||||
|
|
Loading…
Reference in a new issue