From b5531e14350531943953846301c94c96f6ab2aca Mon Sep 17 00:00:00 2001
From: Don Mahurin <@>
Date: Fri, 26 May 2023 06:35:15 -0700
Subject: [PATCH] low_level_api_chat_cpp.py: Fix missing antiprompt output in chat.

---
 examples/low_level_api_chat_cpp.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/examples/low_level_api_chat_cpp.py b/examples/low_level_api_chat_cpp.py
index 205a5b76c..e67cd8e43 100644
--- a/examples/low_level_api_chat_cpp.py
+++ b/examples/low_level_api_chat_cpp.py
@@ -409,12 +409,15 @@ n_keep = {self.params.n_keep}
             # replace end of text token with newline token when in interactive mode
             if (id == llama_cpp.llama_token_eos() and self.params.interactive and not self.params.instruct):
                 id = self.llama_token_newline[0]
+                self.embd.append(id)
                 if (self.use_antiprompt()):
                     # tokenize and inject first reverse prompt
                     self.embd_inp += self.first_antiprompt[0]
-
-            # add it to the context
-            self.embd.append(id)
+                    for id in self.first_antiprompt[0]:
+                        self.embd.append(id)
+            else:
+                # add it to the context
+                self.embd.append(id)
 
             # echo this to console
             self.output_echo = True
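
Below, a self-contained sketch of the behavior the hunk fixes. The token ids
and the handle_token helper are hypothetical stand-ins, not the example's real
API; the point is that the chat loop only echoes tokens that land in embd, so
the injected reverse prompt must be appended there as well as to embd_inp.

    # Sketch only: EOS, NEWLINE, ANTIPROMPT and handle_token are invented
    # stand-ins for illustration, not names from low_level_api_chat_cpp.py.
    EOS, NEWLINE = 2, 13           # stand-in token ids
    ANTIPROMPT = [22, 23]          # stand-in tokens for e.g. "User:"

    def handle_token(id, embd, embd_inp, interactive=True, use_antiprompt=True):
        if id == EOS and interactive:
            # replace end of text token with newline token (as in the hunk)
            id = NEWLINE
            embd.append(id)                # echo the newline
            if use_antiprompt:
                embd_inp += ANTIPROMPT     # queue reverse prompt for evaluation
                for id in ANTIPROMPT:
                    embd.append(id)        # ...and echo it (previously missing)
        else:
            embd.append(id)                # ordinary token: add it to the context

    embd, embd_inp = [], []
    handle_token(EOS, embd, embd_inp)
    print(embd)    # [13, 22, 23]: newline plus antiprompt are now echoed

Before the patch, the EOS branch added the antiprompt only to embd_inp, and the
unconditional self.embd.append(id) ran afterwards, so the reverse prompt was
evaluated but never printed; the else branch restores the append for ordinary
tokens only.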