From 65a1a5856286d95d56da8497256176d74185a968 Mon Sep 17 00:00:00 2001
From: Francis Couture-Harpin
Date: Sun, 12 May 2024 12:56:03 -0400
Subject: [PATCH] convert-hf : add missing ftype to Baichuan and Xverse

I didn't notice these on my first pass.
---
 convert-hf-to-gguf.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py
index cc249f21a..e126dc1db 100755
--- a/convert-hf-to-gguf.py
+++ b/convert-hf-to-gguf.py
@@ -846,6 +846,7 @@ class BaichuanModel(Model):
         self.gguf_writer.add_head_count(head_count)
         self.gguf_writer.add_head_count_kv(head_count_kv)
         self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"])
+        self.gguf_writer.add_file_type(self.ftype)

         if self.hparams.get("rope_scaling") is not None and "factor" in self.hparams["rope_scaling"]:
             if self.hparams["rope_scaling"].get("type") == "linear":
@@ -968,6 +969,7 @@ class XverseModel(Model):
         self.gguf_writer.add_head_count(head_count)
         self.gguf_writer.add_head_count_kv(head_count_kv)
         self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"])
+        self.gguf_writer.add_file_type(self.ftype)

         if self.hparams.get("rope_scaling") is not None and "factor" in self.hparams["rope_scaling"]:
             if self.hparams["rope_scaling"].get("type") == "linear":