make linter happy

This commit is contained in:
slaren 2024-04-02 18:21:45 +02:00
parent d08a1f4860
commit 9530398013

View file

@@ -1257,19 +1257,19 @@ def convert_model_names(model: LazyModel, params: Params, skip_unknown: bool) ->
# merge experts into one tensor
if params.n_experts > 0:
for l in range(params.n_layer):
for i_l in range(params.n_layer):
for w in range(1, 4):
experts = []
for e in range(params.n_experts):
if f"layers.{l}.feed_forward.experts.{e}.w{w}.weight" in model:
experts.append(model[f"layers.{l}.feed_forward.experts.{e}.w{w}.weight"])
del tmp[f"layers.{l}.feed_forward.experts.{e}.w{w}.weight"]
elif f"model.layers.{l}.block_sparse_moe.experts.{e}.w{w}.weight" in model:
experts.append(model[f"model.layers.{l}.block_sparse_moe.experts.{e}.w{w}.weight"])
del tmp[f"model.layers.{l}.block_sparse_moe.experts.{e}.w{w}.weight"]
if f"layers.{i_l}.feed_forward.experts.{e}.w{w}.weight" in model:
experts.append(model[f"layers.{i_l}.feed_forward.experts.{e}.w{w}.weight"])
del tmp[f"layers.{i_l}.feed_forward.experts.{e}.w{w}.weight"]
elif f"model.layers.{i_l}.block_sparse_moe.experts.{e}.w{w}.weight" in model:
experts.append(model[f"model.layers.{i_l}.block_sparse_moe.experts.{e}.w{w}.weight"])
del tmp[f"model.layers.{i_l}.block_sparse_moe.experts.{e}.w{w}.weight"]
else:
raise ValueError(f"Expert tensor not found: layers.{l}.feed_forward.experts.{e}.w{w}.weight")
tmp[f"layers.{l}.feed_forward.experts.w{w}.weight"] = pack_experts_lazy(experts)
raise ValueError(f"Expert tensor not found: layers.{i_l}.feed_forward.experts.{e}.w{w}.weight")
tmp[f"layers.{i_l}.feed_forward.experts.w{w}.weight"] = pack_experts_lazy(experts)
# HF models permut or pack some of the tensors, so we need to undo that
for i in itertools.count():