llama: dbrx: remove wrong attn output layer in model arch

commit 03da419fc0
parent 916b91852b
Author: Pierrick HYMBERT
Date: 2024-04-06 20:43:46 +02:00


@@ -935,7 +935,6 @@ static const std::map<llm_arch, std::map<llm_tensor, std::string>> LLM_TENSOR_NA
 { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
 { LLM_TENSOR_OUTPUT, "output" },
 { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
-{ LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
 { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
 { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
 { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
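
For context, each entry in this map is a printf-style template that gets expanded with the block (layer) index to produce the concrete per-layer tensor name looked up when loading weights from a GGUF file. Below is a minimal, self-contained sketch of that expansion; the trimmed-down enum and the tensor_name helper are illustrative stand-ins, not the actual llama.cpp API:

#include <cstdio>
#include <map>
#include <string>

// Illustrative stand-in for llama.cpp's llm_tensor enum (trimmed down).
enum llm_tensor {
    LLM_TENSOR_ATTN_QKV,
    LLM_TENSOR_ATTN_NORM,
};

// Per-architecture map from tensor id to a printf-style name template,
// mirroring the LLM_TENSOR_NAMES entries shown in the diff above.
static const std::map<llm_tensor, std::string> DBRX_TENSOR_NAMES = {
    { LLM_TENSOR_ATTN_QKV,  "blk.%d.attn_qkv"  },
    { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
};

// Hypothetical helper: expand the template with the block (layer) index
// to get the concrete tensor name used when loading GGUF weights.
static std::string tensor_name(llm_tensor t, int layer) {
    char buf[64];
    std::snprintf(buf, sizeof(buf), DBRX_TENSOR_NAMES.at(t).c_str(), layer);
    return std::string(buf);
}

int main() {
    // Prints "blk.3.attn_qkv" then "blk.3.attn_norm".
    std::printf("%s\n", tensor_name(LLM_TENSOR_ATTN_QKV, 3).c_str());
    std::printf("%s\n", tensor_name(LLM_TENSOR_ATTN_NORM, 3).c_str());
    return 0;
}

Because lookup is driven entirely by this map, a stale entry such as the removed LLM_TENSOR_ATTN_OUT would direct the loader to search the GGUF file for a tensor the architecture does not actually define, which is why pruning the wrong entry is the whole fix here.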