fix: Make the destructor automatically call the .close() method on the Llama class.
parent f4491c4903
commit 4c1d74c0ae
2 changed files with 12 additions and 0 deletions
@@ -64,6 +64,9 @@ class _LlamaModel:
     def close(self):
         self._exit_stack.close()
 
+    def __del__(self):
+        self.close()
+
     def vocab_type(self) -> int:
         assert self.model is not None
         return llama_cpp.llama_vocab_type(self.model)
@@ -292,6 +295,9 @@ class _LlamaContext:
     def close(self):
         self._exit_stack.close()
 
+    def __del__(self):
+        self.close()
+
     def n_ctx(self) -> int:
         assert self.ctx is not None
         return llama_cpp.llama_n_ctx(self.ctx)
@@ -531,6 +537,9 @@ class _LlamaBatch:
     def close(self):
         self._exit_stack.close()
 
+    def __del__(self):
+        self.close()
+
     def n_tokens(self) -> int:
         assert self.batch is not None
         return self.batch.n_tokens
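The three hunks above touch the internal wrapper classes (_LlamaModel, _LlamaContext, _LlamaBatch); the hunk below applies the same change to the public Llama class in the second changed file. Each wrapper registers its native llama.cpp cleanup on a contextlib.ExitStack, so close() unwinds the stack and the new __del__ guarantees that unwind also runs at garbage collection. A minimal, self-contained sketch of that pattern, where the free_native_handle callback and _Resource class are illustrative stand-ins rather than code from the repository:

import contextlib


def free_native_handle(handle: int) -> None:
    # Illustrative stand-in for a native llama.cpp free function.
    print(f"freeing native handle {handle}")


class _Resource:
    def __init__(self, handle: int):
        self.handle = handle
        self._exit_stack = contextlib.ExitStack()
        # Register cleanup; ExitStack runs it when close() is called.
        self._exit_stack.callback(free_native_handle, handle)

    def close(self) -> None:
        # Idempotent: a second close() finds an empty stack and does nothing.
        self._exit_stack.close()

    def __del__(self) -> None:
        # The pattern this commit adds: cleanup also runs on garbage collection.
        self.close()


r = _Resource(42)
r.close()   # explicit close frees the handle now
del r       # __del__ -> close() is a harmless no-op afterwards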
@@ -1968,6 +1968,9 @@ class Llama:
         """Explicitly free the model from memory."""
         self._stack.close()
 
+    def __del__(self) -> None:
+        self.close()
+
     @staticmethod
     def logits_to_logprobs(
         logits: Union[npt.NDArray[np.single], List], axis: int = -1
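In practical terms, before this change a Llama instance whose last reference was dropped without an explicit close() could leave the model's native memory allocated until the interpreter exited; with the __del__ added above, garbage collection now triggers the same cleanup path. A hedged usage sketch, where the model path is a placeholder rather than a file from the repository:

from llama_cpp import Llama

# Placeholder path; any local GGUF model file works here.
llm = Llama(model_path="./models/example.gguf")

out = llm("Q: What is 2 + 2? A:", max_tokens=8)
print(out["choices"][0]["text"])

# Explicit cleanup remains the clearest option...
llm.close()

# ...but after this commit, dropping the last reference also frees the
# underlying llama.cpp resources via __del__ -> close().
del llm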