diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp
index a0e2caf94..015d6a2e4 100644
--- a/tests/test-tokenizer-1-bpe.cpp
+++ b/tests/test-tokenizer-1-bpe.cpp
@@ -65,7 +65,18 @@ int main(int argc, char **argv) {
         std::string str = llama_detokenize_bpe(ctx, std::vector<int>(1, i));
         try {
             auto cps = unicode_cpts_from_utf8(str);
-            std::vector<llama_token> tokens = llama_tokenize(ctx, str, false);
+            std::vector<llama_token> tokens = llama_tokenize(ctx, str, false, true);
+            if (tokens.size() > 1) {
+                fprintf(stderr,
+                        "%s : error: token %d detokenizes to '%s'(%zu) but "
+                        "tokenization of this to multiple tokens: [",
+                        __func__, i, str.c_str(), str.length());
+                fprintf(stderr, "%d", tokens[0]);
+                for (size_t i = 1; i < tokens.size(); i++) {
+                    fprintf(stderr, ", %d", tokens[i]);
+                }
+                fprintf(stderr, "]\n");
+            }
             std::string check = llama_detokenize_bpe(ctx, tokens);
             if (check != str) {
                 fprintf(stderr, "%s : error: token %d detokenizes to '%s'(%zu) but tokenization of this detokenizes to '%s'(%zu)\n",