Fixed list elements typing

Maximilian-Winter 2023-05-25 10:54:08 +02:00
parent da463e6c8c
commit c2585b6889


@@ -316,7 +316,7 @@ class Llama:
         mirostat_tau: llama_cpp.c_float,
         mirostat_eta: llama_cpp.c_float,
         penalize_nl: bool = True,
-        logits_processors: List[Callable[[List[llama_cpp.c_int], List[llama_cpp.c_float]], List[float]]] = None
+        logits_processors: List[Callable[[List[int], List[float]], List[float]]] = None
     ):
         assert self.ctx is not None
         assert len(self.eval_logits) > 0
@@ -444,7 +444,7 @@ class Llama:
         mirostat_eta: float = 0.1,
         mirostat_tau: float = 5.0,
         penalize_nl: bool = True,
-        logits_processors: List[Callable[[List[llama_cpp.c_int], List[llama_cpp.c_float]], List[float]]] = None
+        logits_processors: List[Callable[[List[int], List[float]], List[float]]] = None
     ):
         """Sample a token from the model.
@@ -496,7 +496,7 @@ class Llama:
         mirostat_mode: int = 0,
         mirostat_tau: float = 5.0,
         mirostat_eta: float = 0.1,
-        logits_processors: List[Callable[[List[llama_cpp.c_int], List[llama_cpp.c_float]], List[float]]] = None
+        logits_processors: List[Callable[[List[int], List[float]], List[float]]] = None
     ) -> Generator[int, Optional[Sequence[int]], None]:
         """Create a generator of tokens from a prompt.
@@ -651,8 +651,8 @@ class Llama:
         mirostat_tau: float = 5.0,
         mirostat_eta: float = 0.1,
         model: Optional[str] = None,
-        logits_processors: List[Callable[[List[llama_cpp.c_int], List[llama_cpp.c_float]], List[float]]] = None,
-        stopping_criterias: List[Callable[[List[int], List[llama_cpp.c_float]], bool]] = None,
+        logits_processors: List[Callable[[List[int], List[float]], List[float]]] = None,
+        stopping_criterias: List[Callable[[List[int], List[float]], bool]] = None,
     ) -> Union[Iterator[Completion], Iterator[CompletionChunk]]:
         assert self.ctx is not None
@@ -1035,8 +1035,8 @@ class Llama:
         mirostat_tau: float = 5.0,
         mirostat_eta: float = 0.1,
         model: Optional[str] = None,
-        logits_processors: List[Callable[[List[llama_cpp.c_int], List[llama_cpp.c_float]], List[float]]] = None,
-        stopping_criterias: List[Callable[[List[int], List[llama_cpp.c_float]], bool]] = None
+        logits_processors: List[Callable[[List[int], List[float]], List[float]]] = None,
+        stopping_criterias: List[Callable[[List[int], List[float]], bool]] = None
     ) -> Union[Completion, Iterator[CompletionChunk]]:
         """Generate text from a prompt.