From d4c257d1f98eca0370a67dadc3a8575e3b6e424b Mon Sep 17 00:00:00 2001
From: devojony <61173062+devojony@users.noreply.github.com>
Date: Mon, 22 Jul 2024 13:20:06 +0800
Subject: [PATCH] fix: Android example cannot generate continuously

When generation ends, `completion_loop()` should return NULL, not an
empty string
---
 examples/llama.android/llama/src/main/cpp/llama-android.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/llama.android/llama/src/main/cpp/llama-android.cpp b/examples/llama.android/llama/src/main/cpp/llama-android.cpp
index 92a6b16b1..2aafe2316 100644
--- a/examples/llama.android/llama/src/main/cpp/llama-android.cpp
+++ b/examples/llama.android/llama/src/main/cpp/llama-android.cpp
@@ -409,7 +409,7 @@ Java_android_llama_cpp_LLamaAndroid_completion_1loop(
     const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value);
     if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) {
-        return env->NewStringUTF("");
+        return nullptr;
     }
 
     auto new_token_chars = llama_token_to_piece(context, new_token_id);