llama: fix exception in Llama.__del__ (#846)

cebtenzzre 2023-11-01 18:53:57 -04:00 committed by GitHub
parent 3fc9147218
commit eefd76fe81

@@ -1638,12 +1638,14 @@ class Llama:
         )
         return self._convert_completion_to_chat(completion_or_chunks, stream=stream)  # type: ignore
 
-    def _free_model(self):
-        if hasattr(self, "model") and self.model is not None:
-            llama_cpp.llama_free_model(self.model)
+    def _free_model(self, *, _lfree_model=llama_cpp._lib.llama_free_model, _free=llama_cpp._lib.llama_free):
+        model = getattr(self, 'model', None)
+        if model is not None:
+            _lfree_model(model)
             self.model = None
-        if hasattr(self, "ctx") and self.ctx is not None:
-            llama_cpp.llama_free(self.ctx)
+        ctx = getattr(self, 'ctx', None)
+        if ctx is not None:
+            _free(ctx)
             self.ctx = None
 
     def __del__(self):
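
The new _free_model body guards against two ways a finalizer can raise: attributes that were never set because __init__ failed early (hence getattr with a None default instead of hasattr), and module globals such as llama_cpp that CPython may have already cleared by the time __del__ runs during interpreter shutdown (hence binding the native free functions as keyword-only defaults, which are captured once when the method is defined). The following is a minimal, self-contained sketch of the same pattern; the TempHandle class and its use of OS-level file descriptors are illustrative stand-ins, not part of llama-cpp-python.

import os
import tempfile


class TempHandle:
    """Hypothetical resource holder demonstrating the finalizer-safety
    pattern used in Llama._free_model above."""

    def __init__(self):
        # Acquire a raw OS resource that must be released explicitly.
        self.fd, self.path = tempfile.mkstemp()

    # os.close and os.unlink are bound as keyword-only defaults at definition
    # time, so calling them here does not depend on the `os` module global
    # still being intact while the interpreter shuts down.
    def _free(self, *, _close=os.close, _unlink=os.unlink):
        fd = getattr(self, "fd", None)  # getattr tolerates a failed __init__
        if fd is not None:
            _close(fd)
            self.fd = None
        path = getattr(self, "path", None)
        if path is not None:
            _unlink(path)
            self.path = None

    def __del__(self):
        self._free()


if __name__ == "__main__":
    h = TempHandle()
    del h  # the finalizer releases the descriptor and file without raising

Because each attribute is reset to None after it is released, calling _free more than once (for example explicitly and then again from __del__) is also safe.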