diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp
index fd0d1556a..85a59a14d 100644
--- a/tests/test-tokenizer-1-bpe.cpp
+++ b/tests/test-tokenizer-1-bpe.cpp
@@ -62,9 +62,6 @@ int main(int argc, char **argv) {
     const int n_vocab = llama_n_vocab(model);
 
     for (int i = 0; i < n_vocab; ++i) {
-        if (llama_token_get_type(ctx, i) == LLAMA_TOKEN_TYPE_USER_DEFINED) {
-            continue;
-        }
         std::string str = llama_detokenize_bpe(ctx, std::vector(1, i));
         try {
             auto cps = codepoints_from_utf8(str);