Remove alignment_prevents_mmap, which is no longer needed.
commit a49299bb98 (parent 73bcc5b144)
1 changed file with 0 additions and 13 deletions
llama.cpp | 13 -------------

@@ -581,22 +581,9 @@ struct llama_model_loader {
         if (!llama_mmap::SUPPORTED) {
             use_mmap = false;
         }
-        if (use_mmap && alignment_prevents_mmap()) {
-            fprintf(stderr, "llama.cpp: can't use mmap because tensors are not aligned; convert to new format to avoid this\n");
-            use_mmap = false;
-        }
         this->use_mmap = use_mmap;
     }
 
-    bool alignment_prevents_mmap() {
-        for (const llama_load_tensor & lt : tensors_map.tensors) {
-            if (lt.file_off & 3) {
-                return true;
-            }
-        }
-        return false;
-    }
-
     void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const {
         *ctx_size_p = *mmapped_size_p = 0;
         for (const llama_load_tensor & lt : tensors_map.tensors) {
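For reference, the deleted helper walked every tensor's file offset and reported misalignment whenever `lt.file_off & 3` was nonzero, i.e. whenever the offset was not a multiple of 4; that condition is what used to trigger the "can't use mmap" warning and fall back to ordinary reads. Below is a minimal standalone sketch of that alignment test. The `offsets` vector and the `any_misaligned` name are illustrative stand-ins for the real `tensors_map.tensors` loop, not code from the repository:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Returns true if any offset is not 4-byte aligned.
// (off & 3) is a bitmask test equivalent to (off % 4 != 0).
static bool any_misaligned(const std::vector<uint64_t> & offsets) {
    for (uint64_t off : offsets) {
        if (off & 3) {
            return true;
        }
    }
    return false;
}

int main() {
    // Hypothetical tensor file offsets, for illustration only.
    std::vector<uint64_t> aligned    = {0, 64, 4096};
    std::vector<uint64_t> misaligned = {0, 64, 4097};
    printf("aligned:    %d\n", any_misaligned(aligned));    // prints 0
    printf("misaligned: %d\n", any_misaligned(misaligned)); // prints 1
    return 0;
}
```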