Revert "dont add space when using special tokens"

This reverts commit 1c28116de4.
Jared Van Bortel 2023-11-14 16:48:33 -05:00
parent 6bb4908a17
commit 735ffe3d2f


@@ -6283,7 +6283,7 @@ static std::vector<llama_vocab::id> llama_tokenize_internal(const llama_vocab &
// by modifying llm_tokenizer_x to operate with string offsets like pre-tokenizer
// and passing 'add space prefix' as bool argument
//
auto raw_text = (special ? "" : " ") + fragment.raw_text.substr(fragment.offset, fragment.length);
auto raw_text = " " + fragment.raw_text.substr(fragment.offset, fragment.length);
#ifdef PRETOKENIZERDEBUG
fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str());