Add field to disable resetting between generations
This commit is contained in:
parent 22fa5a621f
commit 6595ad84bf

1 changed file with 4 additions and 1 deletion
@@ -218,6 +218,7 @@ class Llama:
         top_p: float,
         temp: float,
         repeat_penalty: float,
+        reset: bool = True,
     ) -> Generator[
         llama_cpp.llama_token, Optional[Sequence[llama_cpp.llama_token]], None
     ]:
@@ -235,11 +236,13 @@ class Llama:
             top_p: The top-p sampling parameter.
             temp: The temperature parameter.
             repeat_penalty: The repeat penalty parameter.
+            reset: Whether to reset the model state.
 
         Yields:
             The generated tokens.
         """
         assert self.ctx is not None
-        self.reset()
+        if reset:
+            self.reset()
         while True:
             self.eval(tokens)
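
A minimal usage sketch of the new parameter (not part of the commit; the model path, prompt, and sampling values are placeholders, and it assumes the surrounding llama-cpp-python Llama API such as the constructor and tokenize). The first generate call resets the context as before, while a follow-up call with reset=False continues from the already-evaluated state.

# Usage sketch with hypothetical values; illustrates the new reset flag.
from llama_cpp import Llama

llama = Llama(model_path="./models/ggml-model.bin")  # placeholder path
tokens = llama.tokenize(b"Hello, world!")

# Default behaviour: reset=True clears the model state before generating.
stream = llama.generate(tokens, top_k=40, top_p=0.95, temp=0.8, repeat_penalty=1.1)
first_token = next(stream)

# With reset=False, the next call reuses the existing context instead of
# re-evaluating from scratch, which is what this commit makes optional.
stream = llama.generate(
    [first_token], top_k=40, top_p=0.95, temp=0.8, repeat_penalty=1.1, reset=False
)
second_token = next(stream)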