From 8772c255abe4e1ed9fa8c8bb2dae4589ce844463 Mon Sep 17 00:00:00 2001
From: mendax0110
Date: Wed, 2 Aug 2023 10:33:16 +0200
Subject: [PATCH] make use_buf and get_buf_max_mem static

---
 llama.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 159ce967c..8b9d0cbdb 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -406,7 +406,7 @@ struct llama_context {
     ggml_mpi_context * ctx_mpi = NULL;
 #endif
 
-    void use_buf(struct ggml_context * ctx, int i) {
+    static void use_buf(struct ggml_context * ctx, int i) {
 #if defined(LLAMA_USE_SCRATCH)
         size_t last_size = 0;
 
@@ -428,7 +428,7 @@ struct llama_context {
 #endif
     }
 
-    size_t get_buf_max_mem(int i) const {
+    static size_t get_buf_max_mem(int i) {
 #if defined(LLAMA_USE_SCRATCH)
         return buf_max_size[i];
 #else