From f3532ff80c8dbf3147cc290f0839d28ff7b3fb69 Mon Sep 17 00:00:00 2001
From: S
Date: Fri, 5 Apr 2024 10:37:25 +0100
Subject: [PATCH] Whitespace

---
 convert-hf-to-gguf.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py
index 91730e0d8..7e601170e 100755
--- a/convert-hf-to-gguf.py
+++ b/convert-hf-to-gguf.py
@@ -2344,14 +2344,14 @@ class CommandR2Model(Model):
 
         # max_position_embeddings = 8192 in config.json but model was actually
         # trained on 128k context length
-        self.hparams["max_position_embeddings"] = self.hparams["model_max_length"]
 
     def set_gguf_parameters(self):
         super().set_gguf_parameters()
         self.gguf_writer.add_logit_scale(self.hparams["logit_scale"])
         self.gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.NONE)
-
+
+
 ###### CONVERSION LOGIC ######