From dcc5d4241d9daeb746ec02c6b935baff919c52fc Mon Sep 17 00:00:00 2001
From: teleprint-me <77757836+teleprint-me@users.noreply.github.com>
Date: Sun, 19 May 2024 00:06:30 -0400
Subject: [PATCH] fix: Remove dangling if statement

---
 convert-hf-to-gguf.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py
index fc8214831..99b0213e6 100755
--- a/convert-hf-to-gguf.py
+++ b/convert-hf-to-gguf.py
@@ -430,8 +430,7 @@ class Model:
         models = json.load("models/checksums.json")
         for model in models:
             if checksum == model["checksum"]:
-                if
-                    logger.debug(f"tokenizer.ggml.pre: {repr(result)}")
+                logger.debug(f"tokenizer.ggml.pre: {repr(model['repo'])}")
                 logger.debug(f"tokenizer checksum: {checksum}")
                 return model["tokt"]  # NOTE: Use the enum to id the vocab
@@ -448,7 +447,6 @@ class Model:
         logger.warning("**************************************************************************************")
         logger.warning("\n")
         raise NotImplementedError("BPE pre-tokenizer was not recognized - update get_vocab_base_pre()")
-    # Marker: End get_vocab_base_pre
 
     def _set_vocab_gpt2(self) -> None:
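
Postscript (not part of the patch): a minimal, self-contained sketch of the checksum lookup that these hunks leave behind, for readers following along. Only the file name "models/checksums.json", the keys "checksum", "repo", and "tokt", the log messages, and the NotImplementedError come from the diff; the function name, its argument, the sha256 hashing, and the logger setup are assumptions made purely for illustration.

import hashlib
import json
import logging
from pathlib import Path

logger = logging.getLogger("convert-hf-to-gguf")


def get_vocab_base_pre(tokenizer_text: str) -> str:
    # Assumption: the checksum is a sha256 over some canonical tokenizer text;
    # the real script may derive it differently.
    checksum = hashlib.sha256(tokenizer_text.encode("utf-8")).hexdigest()

    # The diff's context line calls json.load("models/checksums.json"); reading
    # the file explicitly here keeps the sketch runnable as written.
    models = json.loads(Path("models/checksums.json").read_text())
    for model in models:
        if checksum == model["checksum"]:
            logger.debug(f"tokenizer.ggml.pre: {repr(model['repo'])}")
            logger.debug(f"tokenizer checksum: {checksum}")
            return model["tokt"]  # NOTE: Use the enum to id the vocab

    logger.warning("**************************************************************************************")
    logger.warning("\n")
    raise NotImplementedError("BPE pre-tokenizer was not recognized - update get_vocab_base_pre()")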