Remove alignment_prevents_mmap, which is no longer needed.

This commit is contained in:
Howard Su 2023-06-27 08:37:18 -07:00
parent 73bcc5b144
commit a49299bb98

View file

@ -581,22 +581,9 @@ struct llama_model_loader {
if (!llama_mmap::SUPPORTED) { if (!llama_mmap::SUPPORTED) {
use_mmap = false; use_mmap = false;
} }
if (use_mmap && alignment_prevents_mmap()) {
fprintf(stderr, "llama.cpp: can't use mmap because tensors are not aligned; convert to new format to avoid this\n");
use_mmap = false;
}
this->use_mmap = use_mmap; this->use_mmap = use_mmap;
} }
bool alignment_prevents_mmap() {
for (const llama_load_tensor & lt : tensors_map.tensors) {
if (lt.file_off & 3) {
return true;
}
}
return false;
}
void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const { void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const {
*ctx_size_p = *mmapped_size_p = 0; *ctx_size_p = *mmapped_size_p = 0;
for (const llama_load_tensor & lt : tensors_map.tensors) { for (const llama_load_tensor & lt : tensors_map.tensors) {