Fix baichuan convert script not detecting model

This commit is contained in:
Galunid 2023-10-23 10:47:01 +02:00
parent 96981f37b1
commit f26df62651


@@ -110,7 +110,7 @@ print("gguf: loading model "+dir_model.name)
 with open(dir_model / "config.json", "r", encoding="utf-8") as f:
     hparams = json.load(f)
 print("hello print: ",hparams["architectures"][0])
-if hparams["architectures"][0] != "BaichuanForCausalLM":
+if hparams["architectures"][0] != "BaichuanForCausalLM" and hparams["architectures"][0] != "BaiChuanForCausalLM":
     print("Model architecture not supported: " + hparams["architectures"][0])
     sys.exit()
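
Context for the change: Baichuan model repositories publish config.json files with either "BaichuanForCausalLM" or "BaiChuanForCausalLM" (capital C) as the architecture string, so an exact match against a single spelling rejects valid models. The following is a minimal, self-contained sketch of the check as it reads after this patch; the model path is a placeholder, not a path from the patch.

    # Sketch of the architecture check after this commit.
    # "path/to/baichuan-model" is a hypothetical example directory.
    import json
    import sys
    from pathlib import Path

    dir_model = Path("path/to/baichuan-model")

    # Load the Hugging Face hyperparameters from config.json.
    with open(dir_model / "config.json", "r", encoding="utf-8") as f:
        hparams = json.load(f)

    # Accept both spellings of the architecture name that Baichuan
    # checkpoints ship with; reject anything else.
    if hparams["architectures"][0] not in ("BaichuanForCausalLM", "BaiChuanForCausalLM"):
        print("Model architecture not supported: " + hparams["architectures"][0])
        sys.exit()

The patch itself keeps the original style (two explicit != comparisons joined with and), which is behaviorally equivalent to the membership test shown above.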