From a6dbec8822954379357e55c47847746d73a47daf Mon Sep 17 00:00:00 2001 From: Anas Ahouzi <112881240+aahouzi@users.noreply.github.com> Date: Sat, 24 Feb 2024 22:06:08 +0100 Subject: [PATCH] Support layer_norm_eps for LlavaStableLM Co-authored-by: Jared Van Bortel --- convert-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 712e55f74..4a565d6ee 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1077,7 +1077,7 @@ class StableLMModel(Model): self.gguf_writer.add_rope_dimension_count(int(hparams["partial_rotary_factor"] * (hparams["hidden_size"] // hparams["num_attention_heads"]))) self.gguf_writer.add_head_count(hparams["num_attention_heads"]) self.gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) - self.gguf_writer.add_layer_norm_eps(hparams["layer_norm_eps"]) + self.gguf_writer.add_layer_norm_eps(self.find_hparam(["layer_norm_eps", "norm_eps"])) class MixtralModel(Model):