From 8ac33ce0fffcbdf4a2acb382e89fe238e3c5825a Mon Sep 17 00:00:00 2001
From: Nigel Bosch
Date: Thu, 24 Aug 2023 16:25:06 -0500
Subject: [PATCH] Save rope scale only for linear scaling

---
 convert.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/convert.py b/convert.py
index bc474ac7b..ee8a671a1 100755
--- a/convert.py
+++ b/convert.py
@@ -168,7 +168,7 @@ class Params:
         n_head = config["num_attention_heads"]
         n_head_kv = config["num_key_value_heads"] if "num_key_value_heads" in config else n_head
         f_norm_eps = config["rms_norm_eps"]
-        f_rope_scale = config["rope_scaling"]["factor"] if "rope_scaling" in config and "factor" in config["rope_scaling"] else None
+        f_rope_scale = config.get("rope_scaling", {}).get("factor", None) if config.get("rope_scaling", {}).get("type", "") == "linear" else None

         n_mult = Params.find_n_mult(n_ff, n_embd)
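
Not part of the patch: a minimal sketch of how the new expression behaves for a few HF-style config.json shapes. rope_scale_from_config is a hypothetical helper used only for illustration; it mirrors the patched line but is not a function in convert.py, and the example configs are made up.

    # Hypothetical helper; mirrors the patched expression for f_rope_scale.
    def rope_scale_from_config(config: dict):
        # Only a "linear" rope_scaling entry contributes a factor; any other
        # type (or a missing rope_scaling key) yields None.
        return config.get("rope_scaling", {}).get("factor", None) if config.get("rope_scaling", {}).get("type", "") == "linear" else None

    # Example config shapes (hypothetical):
    assert rope_scale_from_config({}) is None
    assert rope_scale_from_config({"rope_scaling": {"type": "linear", "factor": 4.0}}) == 4.0
    assert rope_scale_from_config({"rope_scaling": {"type": "dynamic", "factor": 4.0}}) is None

The effect, per the subject line, is that the condition now checks the rope_scaling type rather than only the presence of a factor, so a factor attached to a non-linear scaling type is no longer saved.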