llama : use LLAMA_TOKEN_NULL (#11062)

ggml-ci
This commit is contained in:
Georgi Gerganov 2025-01-06 10:52:15 +02:00 committed by GitHub
parent 5047dd3546
commit 727368c60f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 53 additions and 54 deletions

View file

@ -507,7 +507,7 @@ static std::string tokens_to_str(llama_context * ctx, Iter begin, Iter end) {
// format incomplete utf-8 multibyte character for output
static std::string tokens_to_output_formatted_string(const llama_context * ctx, const llama_token token) {
-    std::string out = token == -1 ? "" : common_token_to_piece(ctx, token);
+    std::string out = token == LLAMA_TOKEN_NULL ? "" : common_token_to_piece(ctx, token);
// if the size is 1 and first bit is 1, meaning it's a partial character
// (size > 1 meaning it's already a known token)