Don't add a space when using special tokens
This commit is contained in:
parent
5974d617c0
commit
1c28116de4
1 changed file with 1 addition and 1 deletion
|
@ -6727,7 +6727,7 @@ static std::vector<llama_vocab::id> llama_tokenize_internal(const llama_vocab &
|
||||||
// by modifying llm_tokenizer_x to operate with string offsets like pre-tokenizer
|
// by modifying llm_tokenizer_x to operate with string offsets like pre-tokenizer
|
||||||
// and passing 'add space prefix' as bool argument
|
// and passing 'add space prefix' as bool argument
|
||||||
//
|
//
|
||||||
auto raw_text = " " + fragment.raw_text.substr(fragment.offset, fragment.length);
|
auto raw_text = (special?"":" ") + fragment.raw_text.substr(fragment.offset, fragment.length);
|
||||||
|
|
||||||
#ifdef PRETOKENIZERDEBUG
|
#ifdef PRETOKENIZERDEBUG
|
||||||
fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str());
|
fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str());
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue