From a463fb7668af6c8199de6fb9f6d73e3d675f6672 Mon Sep 17 00:00:00 2001
From: Christian Falch <875252+chrfalch@users.noreply.github.com>
Date: Sat, 1 Apr 2023 18:46:47 +0200
Subject: [PATCH] Update llama.h Review Comments

Co-authored-by: Pavol Rusnak
---
 llama.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama.h b/llama.h
index da8f7f600..b4769ed20 100644
--- a/llama.h
+++ b/llama.h
@@ -96,7 +96,7 @@ extern "C" {
     // Sets the KV cache containing the current context for the model
     LLAMA_API void llama_set_kv_cache(
             struct llama_context * ctx,
-                  uint8_t * kv_cache,
+            const uint8_t * kv_cache,
                      size_t n_size,
                         int n_token_count);
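
Note: the patch only adds a const qualifier to the kv_cache parameter, documenting that
llama_set_kv_cache reads but never modifies the caller's buffer. Below is a minimal
caller-side sketch of how the revised signature might be used. It assumes the companion
getters llama_get_kv_cache, llama_get_kv_cache_size and llama_get_kv_cache_token_count
from the same change set; treat those names as assumptions if your copy of llama.h differs.

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    #include "llama.h"

    /* Snapshot the current KV cache so it can be restored later.
     * Assumes llama_get_kv_cache(), llama_get_kv_cache_size() and
     * llama_get_kv_cache_token_count() are available alongside this patch. */
    static uint8_t * save_kv_cache(struct llama_context * ctx,
                                   size_t * out_size, int * out_tokens) {
        *out_size   = llama_get_kv_cache_size(ctx);
        *out_tokens = llama_get_kv_cache_token_count(ctx);
        uint8_t * copy = malloc(*out_size);
        if (copy) {
            memcpy(copy, llama_get_kv_cache(ctx), *out_size);
        }
        return copy;
    }

    /* Restore a previously saved KV cache. After this patch the buffer is
     * passed as const uint8_t *, so a read-only snapshot can be handed over
     * without a cast. */
    static void restore_kv_cache(struct llama_context * ctx,
                                 const uint8_t * copy, size_t size, int tokens) {
        llama_set_kv_cache(ctx, copy, size, tokens);
    }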