low_level_api_chat_cpp.py: Fix missing antiprompt output in chat.

Don Mahurin 2023-05-26 06:35:15 -07:00
parent fb79c567d2
commit b5531e1435


@@ -409,10 +409,13 @@ n_keep = {self.params.n_keep}
# replace end of text token with newline token when in interactive mode
if (id == llama_cpp.llama_token_eos() and self.params.interactive and not self.params.instruct):
    id = self.llama_token_newline[0]
    self.embd.append(id)
    if (self.use_antiprompt()):
        # tokenize and inject first reverse prompt
        self.embd_inp += self.first_antiprompt[0]
        for id in self.first_antiprompt[0]:
            self.embd.append(id)
else:
    # add it to the context
    self.embd.append(id)
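
For context on why the change is needed: in this chat loop, embd holds the tokens that get evaluated and echoed to the terminal, while embd_inp only queues pending input. The commit message suggests that before the patch the reverse prompt went into embd_inp alone, so it steered generation but was never printed. Below is a minimal, self-contained sketch of the patched flow; the token ids and the antiprompt tokenization are hypothetical stand-ins, and only the embd/embd_inp names come from the diff.

# Hypothetical stand-ins; real ids come from the model's tokenizer.
EOS = 2
NEWLINE = 13
first_antiprompt = [[29937, 4911]]  # e.g. a tokenized "### User:" reverse prompt

embd = []      # tokens queued for evaluation and display
embd_inp = []  # pending input tokens awaiting consumption

def handle_token(id, interactive=True, instruct=False, use_antiprompt=True):
    # Mirrors the patched branch: replace EOS with a newline, then echo
    # the first reverse prompt instead of silently injecting it.
    if id == EOS and interactive and not instruct:
        id = NEWLINE
        embd.append(id)                           # newline is now displayed
        if use_antiprompt:
            embd_inp.extend(first_antiprompt[0])  # inject as pending input
            for t in first_antiprompt[0]:
                embd.append(t)                    # ...and display it (the fix)
    else:
        embd.append(id)                           # ordinary token: add to context

handle_token(EOS)
print(embd)      # [13, 29937, 4911]: newline plus the antiprompt is output
print(embd_inp)  # [29937, 4911]: the antiprompt is also queued as input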