Added a null check for the llava decode path
This commit is contained in:
parent
2a358fb0c4
commit
252f3a88ac
1 changed file with 6 additions and 4 deletions
|
@ -16076,10 +16076,12 @@ static int llama_decode_internal(
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (uint32_t i = 0; i < n_tokens_all; ++i) {
|
if(batch_all.token != nullptr) {
|
||||||
if (batch_all.token[i] < 0 || (uint32_t)batch_all.token[i] >= lctx.model.vocab.n_vocab) {
|
for (uint32_t i = 0; i < n_tokens_all; ++i) {
|
||||||
LLAMA_LOG_ERROR("%s: invalid token[%d] = %d", __func__, i, batch_all.token[i]);
|
if (batch_all.token[i] < 0 || (uint32_t)batch_all.token[i] >= lctx.model.vocab.n_vocab) {
|
||||||
return -1;
|
LLAMA_LOG_ERROR("%s: invalid token[%d] = %d", __func__, i, batch_all.token[i]);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue