low_level_api_chat_cpp.py: Fix missing antiprompt output in chat.

This commit is contained in:
Don Mahurin 2023-05-26 06:35:15 -07:00
parent 433a2e3e8a
commit 0fa2ec4903

View file

@@ -382,10 +382,13 @@ n_keep = {self.params.n_keep}
     # replace end of text token with newline token when in interactive mode
     if (id == llama_cpp.llama_token_eos() and self.params.interactive and not self.params.instruct):
         id = self.llama_token_newline[0]
+        self.embd.append(id)
         if (self.use_antiprompt()):
             # tokenize and inject first reverse prompt
             self.embd_inp += self.first_antiprompt[0]
+            for id in self.first_antiprompt[0]:
+                self.embd.append(id)
-    # add it to the context
-    self.embd.append(id)
+    else:
+        # add it to the context
+        self.embd.append(id)