fix baichuan1-13b W_pack convert bug

commit 9fd5aa7ecb
parent 37873ae77a

1 changed file with 0 additions and 2 deletions
```diff
@@ -239,8 +239,6 @@ for part_name in part_names:
             tmp[f"model.layers.{i}.self_attn.k_proj.weight"]=reverse_hf_permute_part(model_part[f"model.layers.{i}.self_attn.W_pack.weight"],1,head_count,head_count_kv)
             tmp[f"model.layers.{i}.self_attn.v_proj.weight"]=reverse_hf_part(model_part[f"model.layers.{i}.self_attn.W_pack.weight"],2)
             del tmp[f"model.layers.{i}.self_attn.W_pack.weight"]
-        else:
-            break
 
     for name in model_part.keys():
         data = model_part[name]
```
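The deleted `else:`/`break` is the likely bug: Baichuan-13B checkpoints are sharded across several files, so each `model_part` holds only a subset of layers. Breaking out of the layer loop on the first `i` whose `W_pack.weight` key is absent from the current shard meant no layer stored in a later shard ever got unpacked; dropping the early exit lets the loop simply skip layers that live in other shards.

For reference, a minimal sketch of what the unpacking helpers do, assuming they follow the usual llama.cpp convert-baichuan logic (the helper bodies below are reconstructed, not copied from this repository): `W_pack` stacks the Q, K, and V projection weights along dim 0, and Q/K additionally need the HuggingFace rotary permutation reversed.

```python
import numpy as np

def reverse_hf_permute(w, n_head, n_head_kv=None):
    # Undo the HF rotary interleave: regroup each head's rows into two
    # half-head blocks and swap them back into the order GGML expects.
    if n_head_kv is not None and n_head != n_head_kv:
        n_head //= n_head_kv
    return (w.reshape(n_head, 2, w.shape[0] // n_head // 2, *w.shape[1:])
             .swapaxes(1, 2)
             .reshape(w.shape))

def reverse_hf_permute_part(w, n_part, n_head, n_head_kv=None):
    # Take the n_part-th third of the stacked QKV matrix, then un-permute.
    r = w.shape[0] // 3
    return reverse_hf_permute(w[r * n_part : r * (n_part + 1), ...], n_head, n_head_kv)

def reverse_hf_part(w, n_part):
    # V carries no rotary permutation; a plain slice is enough.
    r = w.shape[0] // 3
    return w[r * n_part : r * (n_part + 1), ...]

# Toy check: hidden size 8, 4 heads (head_dim 2) -> W_pack is (24, 8).
w_pack = np.arange(24 * 8, dtype=np.float32).reshape(24, 8)
q = reverse_hf_permute_part(w_pack, 0, 4)        # q_proj
k = reverse_hf_permute_part(w_pack, 1, 4, 4)     # k_proj
v = reverse_hf_part(w_pack, 2)                   # v_proj
assert q.shape == k.shape == v.shape == (8, 8)
```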