From a49299bb984186cd504e910d40468790a4f1a631 Mon Sep 17 00:00:00 2001 From: Howard Su Date: Tue, 27 Jun 2023 08:37:18 -0700 Subject: [PATCH] Remove alignment_prevents_mmap which is no longer needed. --- llama.cpp | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/llama.cpp b/llama.cpp index ecb0d0a3e..d76c3abd3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -581,22 +581,9 @@ struct llama_model_loader { if (!llama_mmap::SUPPORTED) { use_mmap = false; } - if (use_mmap && alignment_prevents_mmap()) { - fprintf(stderr, "llama.cpp: can't use mmap because tensors are not aligned; convert to new format to avoid this\n"); - use_mmap = false; - } this->use_mmap = use_mmap; } - bool alignment_prevents_mmap() { - for (const llama_load_tensor & lt : tensors_map.tensors) { - if (lt.file_off & 3) { - return true; - } - } - return false; - } - void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const { *ctx_size_p = *mmapped_size_p = 0; for (const llama_load_tensor & lt : tensors_map.tensors) {