Bugfix: n_batch should always be <= n_ctx
parent 248b0566fa
commit 5075c16fcc
1 changed file with 1 addition and 1 deletion
@@ -68,7 +68,7 @@ class Llama:
             maxlen=self.last_n_tokens_size,
         )
         self.tokens_consumed = 0
-        self.n_batch = n_batch
+        self.n_batch = min(n_ctx, n_batch)
 
         self.n_threads = n_threads or multiprocessing.cpu_count()
 
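For reference, a minimal runnable sketch of the constructor logic this hunk touches, with the clamp applied. The class name, parameter defaults, and surrounding fields here are simplified assumptions for illustration, not the library's exact code:

import multiprocessing
from collections import deque


class LlamaSketch:
    """Hypothetical stand-in for the Llama wrapper, showing the n_batch clamp."""

    def __init__(self, n_ctx=512, n_batch=8, n_threads=None, last_n_tokens_size=64):
        # Defaults above are assumed for illustration only.
        self.last_n_tokens_size = last_n_tokens_size
        self.last_n_tokens_data = deque(
            [0] * self.last_n_tokens_size,
            maxlen=self.last_n_tokens_size,
        )
        self.tokens_consumed = 0
        # The fix: an evaluation batch cannot exceed the context window,
        # so clamp n_batch to n_ctx rather than trusting the caller.
        self.n_batch = min(n_ctx, n_batch)

        self.n_threads = n_threads or multiprocessing.cpu_count()


# A batch size larger than the context is silently clamped:
assert LlamaSketch(n_ctx=512, n_batch=2048).n_batch == 512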