Also ignore errors on input prompts

This commit is contained in:
Mug 2023-04-26 14:45:51 +02:00 committed by Don Mahurin
parent c8e6ac366a
commit 36b3494332

View file

@@ -201,7 +201,7 @@ n_keep = {self.params.n_keep}
     # tokenize a prompt
     def _tokenize(self, prompt, bos=True):
        _arr = (llama_cpp.llama_token * (len(prompt) + 1))()
-       _n = llama_cpp.llama_tokenize(self.ctx, prompt.encode("utf8"), _arr, len(_arr), bos)
+       _n = llama_cpp.llama_tokenize(self.ctx, prompt.encode("utf8", errors="ignore"), _arr, len(_arr), bos)
        return _arr[:_n]
    def set_color(self, c):