fix: Raise exceptions when llama model or context fails to load

Andrei Betlen 2024-02-22 00:09:45 -05:00
parent 3632241e98
commit dd22010e85


@@ -51,6 +51,9 @@ class _LlamaModel:
             self.path_model.encode("utf-8"), self.params
         )
 
+        if self.model is None:
+            raise ValueError(f"Failed to load model from file: {path_model}")
+
     def __del__(self):
         if self.model is not None and self._llama_free_model is not None:
             self._llama_free_model(self.model)
@@ -258,6 +261,9 @@ class _LlamaContext:
             self.model.model, self.params
         )
 
+        if self.ctx is None:
+            raise ValueError("Failed to create llama_context")
+
     def __del__(self):
         if self.ctx is not None and self._llama_free is not None:
             self._llama_free(self.ctx)
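
With this change, a failed model load or context allocation surfaces as a ValueError at construction time instead of leaving a NULL pointer to crash later. A minimal usage sketch, assuming the public Llama class (which wraps these internal objects) and a hypothetical model path:

    # Sketch only, not part of this commit; the model path is hypothetical.
    from llama_cpp import Llama

    try:
        llm = Llama(model_path="./models/example.gguf")
    except ValueError as e:
        # Raised when llama_load_model_from_file or
        # llama_new_context_with_model returns NULL.
        print(f"Failed to initialize llama.cpp backend: {e}")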