From bd241db879c44833d83c0ef790200972c4332081 Mon Sep 17 00:00:00 2001
From: KerfuffleV2
Date: Thu, 9 Nov 2023 16:56:27 -0700
Subject: [PATCH] Add JSON dumping support to gguf-dump.py

Which I kind of regret now
---
 gguf-py/scripts/gguf-dump.py | 59 +++++++++++++++++++++++++++++++++---
 1 file changed, 54 insertions(+), 5 deletions(-)

diff --git a/gguf-py/scripts/gguf-dump.py b/gguf-py/scripts/gguf-dump.py
index 923898d3b..1606cad54 100755
--- a/gguf-py/scripts/gguf-dump.py
+++ b/gguf-py/scripts/gguf-dump.py
@@ -3,6 +3,9 @@ import argparse
 import os
 import sys
 from pathlib import Path
+from typing import Any
+
+import numpy as np
 
 # Necessary to load the local gguf package
 if "NO_LOCAL_GGUF" not in os.environ and (Path(__file__).parent.parent.parent / 'gguf-py').exists():
@@ -13,7 +16,50 @@ from gguf import GGUFReader, GGUFValueType  # noqa: E402
 
 # For more information about what field.parts and field.data represent,
 # please see the comments in the modify_gguf.py example.
-def dump_metadata(reader: GGUFReader, dump_tensors: bool = True) -> None:
+def dump_metadata(reader: GGUFReader, args: argparse.Namespace) -> None:
+    host_endian = 'LITTLE' if np.uint32(1) == np.uint32(1).newbyteorder("<") else 'BIG'
+    if reader.byte_order == 'S':
+        file_endian = 'BIG' if host_endian == 'LITTLE' else 'LITTLE'
+    else:
+        file_endian = host_endian
+    if args.json:
+        import json
+        metadata: dict[str, Any] = {}
+        tensors: dict[str, Any] = {}
+        result = {
+            "filename": args.model,
+            "endian": file_endian,
+            "metadata": metadata,
+            "tensors": tensors,
+        }
+        for field in reader.fields.values():
+            curr: dict[str, Any] = {
+                "type": field.types[0].name if field.types else 'UNKNOWN',
+                "offset": field.offset,
+            }
+            metadata[field.name] = curr
+            if field.types[:1] == [GGUFValueType.ARRAY]:
+                curr["array_types"] = [t.name for t in field.types][1:]
+                if not args.json_array:
+                    continue
+                itype = field.types[-1]
+                if itype == GGUFValueType.STRING:
+                    curr["value"] = [str(bytes(field.parts[idx]), encoding="utf-8") for idx in field.data]
+                else:
+                    curr["value"] = [pv for idx in field.data for pv in field.parts[idx].tolist()]
+            elif field.types[0] == GGUFValueType.STRING:
+                curr["value"] = str(bytes(field.parts[-1]), encoding="utf-8")
+            else:
+                curr["value"] = field.parts[-1].tolist()[0]
+        for tensor in reader.tensors:
+            tensors[tensor.name] = {
+                "shape": tensor.shape.tolist(),
+                "type": tensor.tensor_type.name,
+                "offset": tensor.field.offset,
+            }
+        json.dump(result, sys.stdout)
+        return
+    print(f'* File is {file_endian} endian, script is running on a {host_endian} endian host.')
     print(f'\n* Dumping {len(reader.fields)} key/value pair(s)')
     for n, field in enumerate(reader.fields.values(), 1):
         if not field.types:
@@ -31,7 +77,7 @@
             elif field.types[0] in reader.gguf_scalar_to_np:
                 print(' = {0}'.format(field.parts[-1][0]), end = '')
         print()
-    if not dump_tensors:
+    if args.no_tensors:
         return
     print(f'\n* Dumping {len(reader.tensors)} tensor(s)')
     for n, tensor in enumerate(reader.tensors, 1):
@@ -41,12 +87,15 @@
 def main() -> None:
     parser = argparse.ArgumentParser(description="Dump GGUF file metadata")
-    parser.add_argument("model", type=str, help="GGUF format model filename")
+    parser.add_argument("model", type=str, help="GGUF format model filename")
     parser.add_argument("--no-tensors", action="store_true", help="Don't dump tensor metadata")
+    parser.add_argument("--json", action="store_true", help="Produce JSON output")
+    parser.add_argument("--json-array", action="store_true", help="Include full array values in JSON output (long)")
     args = parser.parse_args(None if len(sys.argv) > 1 else ["--help"])
-    print(f'* Loading: {args.model}')
+    if not args.json:
+        print(f'* Loading: {args.model}')
     reader = GGUFReader(args.model, 'r')
-    dump_metadata(reader, not args.no_tensors)
+    dump_metadata(reader, args)
 
 
 if __name__ == '__main__':
     main()
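
Note on the host-endian probe in dump_metadata(): it compares a
np.uint32 against its little-endian reinterpretation, so it yields
'LITTLE' exactly when the host stores integers little-endian. A sketch
of an equivalent check using only the standard library (not what the
patch uses, shown here for illustration only):

    import sys

    # sys.byteorder is 'little' or 'big' for the running interpreter.
    host_endian = 'LITTLE' if sys.byteorder == 'little' else 'BIG'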
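With the patch applied, --json writes a single JSON document to stdout.
A minimal sketch of consuming that output downstream (the model path
and the subprocess invocation are illustrative, not part of the patch;
the keys match the `result` dict built in dump_metadata()):

    import json
    import subprocess
    import sys

    # Run the dumper and capture its JSON output (path is hypothetical).
    proc = subprocess.run(
        [sys.executable, "gguf-py/scripts/gguf-dump.py", "--json", "model.gguf"],
        check=True, capture_output=True, text=True,
    )
    doc = json.loads(proc.stdout)

    print(doc["endian"])  # 'LITTLE' or 'BIG', as computed in dump_metadata()
    for name, info in doc["tensors"].items():
        print(name, info["type"], info["shape"], info["offset"])
    # Metadata values (and, with --json-array, full array contents)
    # live under doc["metadata"][key]["value"].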