dont add space when using special tokens

staviq 2023-10-13 01:14:23 +02:00
parent 5974d617c0
commit 1c28116de4


@@ -6727,7 +6727,7 @@ static std::vector<llama_vocab::id> llama_tokenize_internal(const llama_vocab &
 // by modifying llm_tokenizer_x to operate with string offsets like pre-tokenizer
 // and passing 'add space prefix' as bool argument
 //
-auto raw_text = " " + fragment.raw_text.substr(fragment.offset, fragment.length);
+auto raw_text = (special?"":" ") + fragment.raw_text.substr(fragment.offset, fragment.length);
 #ifdef PRETOKENIZERDEBUG
 fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str());
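
For context, here is a minimal standalone sketch of the behavior this one-line change introduces. It is not llama.cpp's actual tokenizer code: `prepare_fragment` is a hypothetical helper and the special-token string is only an example. The point it illustrates is that the SentencePiece-style leading space is now prepended only to ordinary text fragments, never to fragments that were matched as special tokens.

```cpp
// Minimal sketch (assumption: not llama.cpp's real code) of the changed behavior:
// prepend the space prefix only when the fragment is NOT a special token.
#include <iostream>
#include <string>

static std::string prepare_fragment(const std::string & raw_text, bool special) {
    // Before this commit the prefix was added unconditionally: " " + raw_text
    return (special ? "" : " ") + raw_text;
}

int main() {
    std::cout << '[' << prepare_fragment("Hello world", false) << "]\n"; // prints "[ Hello world]"
    std::cout << '[' << prepare_fragment("<|im_start|>", true)  << "]\n"; // prints "[<|im_start|>]"
    return 0;
}
```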