revert unnecessary change
parent 1dc02150bc
commit a92c920eec

1 changed file with 13 additions and 16 deletions
@@ -6,6 +6,7 @@ from __future__ import annotations
 
 import logging
 import os
+import mmap
 import struct
 from collections import OrderedDict
 from typing import Any, Literal, NamedTuple, TypeVar, Union
@@ -91,7 +92,7 @@ class GGUFReader:
         file_mode = "rb" if mode == 'r' else 'rb+'
         self.mode = mode
         self.data = open(path, mode=file_mode)
-        self.mmap = np.memmap(path, mode = mode)
+        self.mmap = np.memmap(self.data, mode = mode)
         offs = 0
 
         # Check for GGUF magic
@@ -132,6 +133,8 @@ class GGUFReader:
             offs += self.alignment - padding
         self.data_offset = offs
         self._build_tensors(offs, tensors_fields)
+
+    def __del__(self) -> None:
         self.data.close()
 
     _DT = TypeVar('_DT', bound = npt.DTypeLike)
@@ -145,23 +148,17 @@ class GGUFReader:
         return self.tensors[idx]
 
     def _get(
-        self, offset: int, dtype: npt.DTypeLike, count: int = 1, override_order: None | Literal['I', 'S', '<'] = None, lazy: bool = False,
+        self, offset: int, dtype: npt.DTypeLike, count: int = 1, override_order: None | Literal['I', 'S', '<'] = None,
     ) -> npt.NDArray[Any]:
         count = int(count)
         itemsize = np.dtype(dtype).itemsize
-        if not lazy:
-            self.data.seek(offset)
-            data = (
-                np.frombuffer(self.data.read(itemsize * count), dtype = dtype, count = count)
-                .newbyteorder(override_order or self.byte_order)
-            )
-            return data if self.mode == 'r' else data.copy()
-        else:
-            return (
-                self.mmap[offset:offset + itemsize * count]
-                .view(dtype = dtype)[:count]
-                .newbyteorder(override_order or self.byte_order)
-            )
+        new_offset = offset + itemsize * count
+        self.data.seek(new_offset)
+        return (
+            self.mmap[offset:new_offset]
+            .view(dtype = dtype)[:count]
+            .newbyteorder(override_order or self.byte_order)
+        )
 
     def _push_field(self, field: ReaderField, skip_sum: bool = False) -> int:
         if field.name in self.fields:
@@ -328,7 +325,7 @@ class GGUFReader:
                 n_elements = n_elems,
                 n_bytes = n_bytes,
                 data_offset = data_offs,
-                data = self._get(data_offs, item_type, item_count, lazy=True).reshape(np_dims),
+                data = self._get(data_offs, item_type, item_count).reshape(np_dims),
                 field = field,
             ))
         self.tensors = tensors
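After this change, GGUFReader._get always returns a NumPy view over the memory-mapped file, so tensor data arrays reference the file contents directly instead of being eagerly copied out with np.frombuffer. A minimal usage sketch of reading a file through the reader follows; the `gguf` package import and the `model.gguf` path are illustrative assumptions, not part of this commit.

# Minimal sketch: iterate tensors via GGUFReader; file name is an assumption.
from gguf import GGUFReader  # gguf-py package from llama.cpp

reader = GGUFReader("model.gguf", mode="r")  # read-only mapping of the file

for tensor in reader.tensors:
    # tensor.data is a view into the memory-mapped file, so bytes are only
    # paged in when the array is actually accessed.
    print(tensor.name, tensor.shape, tensor.data.dtype)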