From bd697ca77df238f0ee7382c0a4e575f18dc5df57 Mon Sep 17 00:00:00 2001
From: Xuan Son Nguyen
Date: Mon, 21 Oct 2024 00:09:56 +0200
Subject: [PATCH] llama : fix empty batch causing llama_batch_allocr to crash

---
 src/llama.cpp | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/llama.cpp b/src/llama.cpp
index 1813dd29b..d64200402 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -21139,6 +21139,10 @@ struct llama_batch_allocr {
     // optionally fulfill the batch returned by llama_batch_get_one
     llama_batch_allocr(struct llama_context * ctx, struct llama_batch in_batch) {
         batch = in_batch;
+        if (batch.n_tokens == 0) {
+            // llama_(de|en)code_internal will return an error in this case
+            return;
+        }
         if (!batch.pos) {
             // determine the last position in KV cache
             llama_pos last_pos = -1;