Merge branch 'main' of https://github.com/abetlen/llama-cpp-python into main
commit 3561ebf536
3 changed files with 38 additions and 8 deletions
@@ -51,6 +51,9 @@ class _LlamaModel:
                 self.path_model.encode("utf-8"), self.params
             )

+        if self.model is None:
+            raise ValueError(f"Failed to load model from file: {path_model}")
+
     def __del__(self):
         if self.model is not None and self._llama_free_model is not None:
             self._llama_free_model(self.model)
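The added guard turns a failed llama_load_model_from_file call into an immediate ValueError instead of leaving self.model as a null handle. A minimal sketch of how that surfaces to a caller, assuming the error propagates up through the public Llama constructor (the path below is a placeholder for a file that exists but is not a loadable GGUF model):

from llama_cpp import Llama

try:
    # Placeholder path: a file that exists but is not a valid GGUF model.
    llm = Llama(model_path="./models/corrupt.gguf")
except ValueError as e:
    # With the added check, a failed llama_load_model_from_file call now
    # surfaces here as a ValueError instead of leaving self.model unset.
    print(f"Model load failed: {e}")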
@@ -258,6 +261,9 @@ class _LlamaContext:
                 self.model.model, self.params
             )

+        if self.ctx is None:
+            raise ValueError("Failed to create llama_context")
+
     def __del__(self):
         if self.ctx is not None and self._llama_free is not None:
             self._llama_free(self.ctx)
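Both __del__ methods shown as context keep the free function on the instance and check it alongside the handle before calling it, which protects a partially constructed object and avoids relying on module globals during interpreter shutdown. A standalone sketch of the same pattern, with illustrative names that are not taken from the library:

import ctypes
from typing import Callable, Optional

class NativeHandle:
    """Illustrative wrapper (not from the library) mirroring the guarded __del__ pattern."""

    def __init__(self, create: Callable[[], Optional[ctypes.c_void_p]],
                 free: Callable[[ctypes.c_void_p], None]):
        self.handle: Optional[ctypes.c_void_p] = None
        # Store the free function on the instance; module globals may already
        # be torn down by the time __del__ runs at interpreter shutdown.
        self._free: Optional[Callable[[ctypes.c_void_p], None]] = free
        self.handle = create()
        if self.handle is None:
            raise ValueError("Failed to create native handle")

    def __del__(self):
        # Free only if both the handle and the free function survived __init__.
        if self.handle is not None and self._free is not None:
            self._free(self.handle)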
@@ -1885,8 +1885,9 @@ class Llama:
         cls,
         repo_id: str,
         filename: Optional[str],
-        local_dir: Optional[Union[str, os.PathLike[str]]] = ".",
+        local_dir: Optional[Union[str, os.PathLike[str]]] = None,
         local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
+        cache_dir: Optional[Union[str, os.PathLike[str]]] = None,
         **kwargs: Any,
     ) -> "Llama":
         """Create a Llama model from a pretrained model name or path.
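With local_dir now defaulting to None and a cache_dir parameter added, from_pretrained no longer writes the downloaded file into the current directory by default; it goes through the Hugging Face cache unless a local_dir is passed explicitly. A usage sketch, where the repo id and filename pattern are examples rather than anything from this commit:

from llama_cpp import Llama

# New default (local_dir=None): resolve the file through the Hugging Face cache.
llm = Llama.from_pretrained(
    repo_id="Qwen/Qwen1.5-0.5B-Chat-GGUF",  # example repo, not part of this commit
    filename="*q8_0.gguf",
)

# Old behaviour, now opt-in: materialize the file in the working directory.
llm_local = Llama.from_pretrained(
    repo_id="Qwen/Qwen1.5-0.5B-Chat-GGUF",
    filename="*q8_0.gguf",
    local_dir=".",
)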
@@ -1945,18 +1946,29 @@ class Llama:
         subfolder = str(Path(matching_file).parent)
         filename = Path(matching_file).name

-        local_dir = "."
-
         # download the file
         hf_hub_download(
             repo_id=repo_id,
-            local_dir=local_dir,
             filename=filename,
             subfolder=subfolder,
+            local_dir=local_dir,
             local_dir_use_symlinks=local_dir_use_symlinks,
+            cache_dir=cache_dir,
         )

-        model_path = os.path.join(local_dir, filename)
+        if local_dir is None:
+            model_path = hf_hub_download(
+                repo_id=repo_id,
+                filename=filename,
+                subfolder=subfolder,
+                local_dir=local_dir,
+                local_dir_use_symlinks=local_dir_use_symlinks,
+                cache_dir=cache_dir,
+                local_files_only=True,
+
+            )
+        else:
+            model_path = os.path.join(local_dir, filename)

         return cls(
             model_path=model_path,
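When local_dir is None, the second hf_hub_download call with local_files_only=True is what resolves the on-disk path of the file the first call placed in the cache. The same two-step pattern can be exercised directly with huggingface_hub; the repo id and filename below are placeholders:

from huggingface_hub import hf_hub_download

repo_id = "someuser/some-model-GGUF"  # placeholder
filename = "model-q4_k_m.gguf"        # placeholder

# Step 1: ensure the file is present in the Hugging Face cache.
hf_hub_download(repo_id=repo_id, filename=filename)

# Step 2: with local_files_only=True this does no network access and simply
# returns the cached path, which is what ends up being passed to Llama().
model_path = hf_hub_download(
    repo_id=repo_id,
    filename=filename,
    local_files_only=True,
)
print(model_path)

hf_hub_download returns the resolved path from both calls; the offline second call is used purely as a cache lookup.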
@@ -1498,9 +1498,21 @@ class SchemaConverter:
             item_rule_name = self.visit(
                 schema["items"], f'{name}{"-" if name else ""}item'
             )
-            rule = (
-                f'"[" space ({item_rule_name} ("," space {item_rule_name})*)? "]" space'
-            )
+            list_item_operator = f'("," space {item_rule_name})'
+            successive_items = ""
+            min_items = schema.get("minItems", 0)
+            if min_items > 0:
+                first_item = f"({item_rule_name})"
+                successive_items = list_item_operator * (min_items - 1)
+                min_items -= 1
+            else:
+                first_item = f"({item_rule_name})?"
+            max_items = schema.get("maxItems")
+            if max_items is not None and max_items > min_items:
+                successive_items += (list_item_operator + "?") * (max_items - min_items - 1)
+            else:
+                successive_items += list_item_operator + "*"
+            rule = f'"[" space {first_item} {successive_items} "]" space'
             return self._add_rule(rule_name, rule)

         else:
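The rewritten array branch expands minItems/maxItems into a required prefix of items followed by a bounded (or unbounded) optional tail. A standalone sketch of the same construction for a schema with minItems=2 and maxItems=4, assuming the item sub-rule has been registered under the name "item":

# Reproduces the rule construction above for an array schema with
# minItems=2 and maxItems=4; the item sub-rule name "item" is assumed.
schema = {"type": "array", "items": {"type": "integer"}, "minItems": 2, "maxItems": 4}
item_rule_name = "item"

list_item_operator = f'("," space {item_rule_name})'
successive_items = ""
min_items = schema.get("minItems", 0)
if min_items > 0:
    first_item = f"({item_rule_name})"
    successive_items = list_item_operator * (min_items - 1)
    min_items -= 1
else:
    first_item = f"({item_rule_name})?"
max_items = schema.get("maxItems")
if max_items is not None and max_items > min_items:
    successive_items += (list_item_operator + "?") * (max_items - min_items - 1)
else:
    successive_items += list_item_operator + "*"
rule = f'"[" space {first_item} {successive_items} "]" space'
print(rule)
# '"[" space (item) ("," space item)("," space item)?("," space item)? "]" space'

The resulting rule requires two items and admits at most two more, matching the bound of two to four items; when maxItems is absent, the tail falls back to the unbounded ("," space item)* form.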