Fix a bug caused by a wrong type

This commit is contained in:
Mug 2023-05-06 22:22:28 +02:00
parent 996f63e9e1
commit fd80ddf703

View file

@@ -330,7 +330,7 @@ n_keep = {self.params.n_keep}
# Apply params.logit_bias map # Apply params.logit_bias map
for key, value in self.params.logit_bias.items(): for key, value in self.params.logit_bias.items():
logits[key] += llama_cpp.c_float(value) logits[key] += value
_arr = (llama_cpp.llama_token_data * n_vocab)(*[ _arr = (llama_cpp.llama_token_data * n_vocab)(*[
llama_cpp.llama_token_data(token_id, logits[token_id], 0.0) llama_cpp.llama_token_data(token_id, logits[token_id], 0.0)