fix: Raise exceptions when llama model or context fails to load
This commit is contained in:
parent 3632241e98
commit dd22010e85
1 changed file with 6 additions and 0 deletions
@@ -51,6 +51,9 @@ class _LlamaModel:
             self.path_model.encode("utf-8"), self.params
         )
 
+        if self.model is None:
+            raise ValueError(f"Failed to load model from file: {path_model}")
+
     def __del__(self):
         if self.model is not None and self._llama_free_model is not None:
             self._llama_free_model(self.model)
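With this hunk, a model file that cannot be loaded now fails fast inside _LlamaModel.__init__ instead of leaving self.model as a NULL pointer. A minimal usage sketch, assuming this is the llama_cpp package and that its high-level Llama constructor creates a _LlamaModel internally (the model path below is hypothetical):

    from llama_cpp import Llama

    try:
        llm = Llama(model_path="./models/does-not-exist.gguf")
    except ValueError as err:
        # Raised by _LlamaModel when the underlying load call returns NULL.
        print(f"model load failed: {err}")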
@@ -258,6 +261,9 @@ class _LlamaContext:
             self.model.model, self.params
         )
 
+        if self.ctx is None:
+            raise ValueError("Failed to create llama_context")
+
     def __del__(self):
         if self.ctx is not None and self._llama_free is not None:
             self._llama_free(self.ctx)
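Both hunks apply the same guard pattern: check the result of the native call for None (ctypes maps a NULL pointer to None), raise in __init__, and keep __del__ tolerant of a partially constructed object, since __del__ can still run after __init__ has raised. An illustrative sketch of that pattern, with made-up names rather than the library's actual internals:

    class _NativeHandle:
        def __init__(self, create_fn, free_fn, *args):
            self._free = free_fn
            self.handle = create_fn(*args)
            if self.handle is None:  # NULL pointer from the C side
                raise ValueError("Failed to create native handle")

        def __del__(self):
            # Guarded so a failed __init__ or interpreter shutdown cannot crash here.
            if getattr(self, "handle", None) is not None and self._free is not None:
                self._free(self.handle)
                self.handle = None

This is also why the pre-existing None checks in each __del__ stay in place: they keep teardown safe when construction aborted partway through.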