added null check for llava decode

This commit is contained in:
l3utterfly 2024-09-15 21:45:48 +09:00
parent 2a358fb0c4
commit 252f3a88ac

View file

@ -16076,12 +16076,14 @@ static int llama_decode_internal(
         return -1;
     }
+    if(batch_all.token != nullptr) {
     for (uint32_t i = 0; i < n_tokens_all; ++i) {
         if (batch_all.token[i] < 0 || (uint32_t)batch_all.token[i] >= lctx.model.vocab.n_vocab) {
             LLAMA_LOG_ERROR("%s: invalid token[%d] = %d", __func__, i, batch_all.token[i]);
             return -1;
         }
     }
+    }
 
     const auto & model = lctx.model;
     const auto & hparams = model.hparams;