llama : fix detection of control-like user-defined tokens
commit 56df1fcdcb
parent 6b961e3d24
2 changed files with 4 additions and 3 deletions
@@ -5513,7 +5513,8 @@ static void llm_load_vocab(
                 }
             }

-            if ((token_data.attr & LLAMA_TOKEN_ATTR_USER_DEFINED) && token_data.text.find('<') && token_data.text.rfind('>')) {
+            if ((token_data.attr & LLAMA_TOKEN_ATTR_USER_DEFINED) && !token_data.text.empty() &&
+                 token_data.text.front() == '<' && token_data.text.back() == '>') {
                 // Some models mark some added tokens which ought to be control tokens as not special.
                 // (e.g. command-r, command-r-plus, deepseek-coder)
                 // TODO: should this be fixed in the convert script instead?
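Why the old condition was wrong: std::string::find and std::string::rfind return character positions, not booleans. A token whose text begins with '<' has find('<') == 0, which converts to false, so genuinely control-like tokens were skipped, while any text with a bracket past index 0, or with no brackets at all (npos is non-zero and therefore truthy), passed the check. The new condition inspects the first and last characters directly and guards against empty text. Below is a minimal standalone sketch of the two checks; the helper names and sample strings are illustrative and not taken from the repository.

#include <cassert>
#include <string>

// Old check: find()/rfind() yield positions; a match at index 0 converts to
// false, while any non-zero position, including npos for "not found", is truthy.
static bool looks_control_like_old(const std::string & text) {
    return text.find('<') && text.rfind('>');
}

// New check: non-empty text that starts with '<' and ends with '>'.
static bool looks_control_like_new(const std::string & text) {
    return !text.empty() && text.front() == '<' && text.back() == '>';
}

int main() {
    assert(!looks_control_like_old("<|END_OF_TURN_TOKEN|>")); // false negative: '<' sits at index 0
    assert( looks_control_like_new("<|END_OF_TURN_TOKEN|>")); // detected as control-like
    assert( looks_control_like_old("foo<bar>"));              // false positive: brackets merely present
    assert(!looks_control_like_new("foo<bar>"));              // rejected: does not start with '<'
    assert( looks_control_like_old("plain"));                 // false positive: npos && npos is true
    assert(!looks_control_like_new("plain"));                 // rejected: no angle brackets at all
    return 0;
}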
@@ -195,7 +195,7 @@ int main(int argc, char **argv) {
     const bool add_special = false;

     for (const auto & test_kv : k_tests) {
-        const std::vector<llama_token> res = llama_tokenize(ctx, test_kv.first, add_special);
+        const std::vector<llama_token> res = llama_tokenize(ctx, test_kv.first, add_special, false);

         printf("\n");
         printf("src: '%s'\n", test_kv.first.c_str());
@@ -253,7 +253,7 @@ int main(int argc, char **argv) {
         {
             const auto t_start = ggml_time_us();

-            res = llama_tokenize(ctx, text, add_special);
+            res = llama_tokenize(ctx, text, add_special, false);

             const auto t_end = ggml_time_us();

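Both test hunks pass an explicit fourth argument to the llama_tokenize convenience wrapper used by the tokenizer tests. Assuming the common-helper and C API signatures of this period, that argument is parse_special; with false, the literal text of special tokens occurring in the test input is tokenized as ordinary text rather than matched back to a single special token id, keeping the output comparable with the reference tokenizer. A rough sketch of such a wrapper, written for illustration (the name tokenize_text is hypothetical, not the repository's helper), forwarding the flag to the C API:

#include <string>
#include <vector>

#include "llama.h"

// Hypothetical wrapper; assumes the C-API signature
// llama_tokenize(model, text, text_len, tokens, n_tokens_max, add_special, parse_special).
static std::vector<llama_token> tokenize_text(
        const llama_context * ctx,
        const std::string   & text,
        bool                  add_special,
        bool                  parse_special) {
    // generous upper bound: at most one token per byte, plus room for added specials
    std::vector<llama_token> result(text.size() + 2);
    const int n = llama_tokenize(llama_get_model(ctx),
                                 text.data(), (int) text.size(),
                                 result.data(), (int) result.size(),
                                 add_special, parse_special);
    // a production helper would retry with a larger buffer when n is negative
    result.resize(n > 0 ? n : 0);
    return result;
}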