From fc8773d3096fdbd1266c0e62d7136289fbae631f Mon Sep 17 00:00:00 2001
From: mare5x
Date: Sun, 30 Jun 2024 20:14:18 +0200
Subject: [PATCH] token healing : handle more special tokens

Infill tokens were being rolled back in certain cases.
---
 common/sampling.cpp | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/common/sampling.cpp b/common/sampling.cpp
index bdcdde057..b5c6b9ad3 100644
--- a/common/sampling.cpp
+++ b/common/sampling.cpp
@@ -67,7 +67,16 @@ static llama_token_healing_output llama_token_healing_get_prefix(
     const llama_model * model = llama_get_model(ctx_main);
     auto is_special_token = [&](const llama_token token_id) {
-        return llama_token_is_control(model, token_id) || llama_token_is_eog(model, token_id);
+        return llama_token_is_control(model, token_id)
+            || llama_token_bos    (model) == token_id
+            || llama_token_eos    (model) == token_id
+            || llama_token_cls    (model) == token_id
+            || llama_token_sep    (model) == token_id
+            || llama_token_pad    (model) == token_id
+            || llama_token_prefix (model) == token_id
+            || llama_token_middle (model) == token_id
+            || llama_token_suffix (model) == token_id
+            || llama_token_eot    (model) == token_id;
     };
 
     if (th_type == llama_token_healing_type::DYNAMIC_ONCE || th_type == llama_token_healing_type::DYNAMIC_MULTI) {
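
For context, a minimal standalone C++ sketch of the behavior this patch protects: token healing rolls back trailing prompt tokens so the sampler can re-grow them, but the rollback loop must stop at special tokens, otherwise infill (FIM) tokens get removed from the prompt. This is not the llama.cpp implementation; the token IDs, the "special" set, and max_to_remove below are hypothetical stand-ins for the real vocabulary and the is_special_token lambda in the patch.

// Sketch of a token-healing rollback loop guarded by a special-token check.
// All token IDs here are made up: 1..4 stand in for ordinary text tokens,
// 100/101/102 stand in for the FIM prefix/suffix/middle tokens.
#include <cstdio>
#include <string>
#include <unordered_set>
#include <vector>

int main() {
    // Pretend infill-style prompt: ... <PRE> code <SUF> more <MID>
    std::vector<int> prompt = {1, 2, 100, 3, 101, 4, 102};
    const std::unordered_set<int> special = {100, 101, 102};

    // Remove up to max_to_remove trailing tokens, but never a special one:
    // rolling back <MID> would break the infill request.
    const int max_to_remove = 3;
    int n_removed = 0;
    std::string removed_ids;
    while (n_removed < max_to_remove && !prompt.empty()) {
        const int token_id = prompt.back();
        if (special.count(token_id)) {
            break;  // same role as is_special_token() in the patch
        }
        prompt.pop_back();
        removed_ids = std::to_string(token_id) + " " + removed_ids;
        n_removed++;
    }

    // With <MID> (102) last, nothing is rolled back and the infill token
    // survives; without the guard, it would be removed like any other token.
    printf("rolled back %d token(s): %s\n", n_removed, removed_ids.c_str());
    return 0;
}

The patch achieves the same effect by widening the is_special_token check from control/EOG tokens to the full set of BOS/EOS/CLS/SEP/PAD and the infill prefix/middle/suffix/EOT tokens.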