From c86cca806144a31e912b7b74683a45aa7b0987e9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 28 Oct 2023 13:21:29 +0300 Subject: [PATCH] llama : add comment about llama_sample_token_greedy() missing probs --- llama.h | 1 + 1 file changed, 1 insertion(+) diff --git a/llama.h b/llama.h index 2f2fee0e2..4f49b892e 100644 --- a/llama.h +++ b/llama.h @@ -658,6 +658,7 @@ extern "C" { float * mu); /// @details Selects the token with the highest probability. + /// Does not compute the token probabilities. Use llama_sample_softmax() instead if the probabilities are needed. LLAMA_API llama_token llama_sample_token_greedy( struct llama_context * ctx, llama_token_data_array * candidates);