llama : fix MiniCPM (#5392)
* fix bug for norm_rms_eps missing
* to align with the same order as convert.py for model write
* fix: undo HF models permute tensor
* update for flake8 lint
parent a6e514a85f
commit 4aa43fab56
2 changed files with 63 additions and 2 deletions
```diff
@@ -2947,6 +2947,8 @@ static void llm_load_hparams(
             } break;
         case LLM_ARCH_MINICPM:
             {
+                ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps);
+
                 switch (hparams.n_layer) {
                     case 40: model.type = e_model::MODEL_2B; break;
                     default: model.type = e_model::MODEL_UNKNOWN;
```
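The added `ml.get_key` call is what resolves the first commit-message item: without it, `hparams.f_norm_rms_eps` is never populated from the model file for MiniCPM and keeps whatever default the struct carries. Below is a minimal, self-contained C++ sketch of that failure mode; the `get_key` helper, the metadata map, and the key name `minicpm.attention.layer_norm_rms_epsilon` are illustrative stand-ins for this sketch, not the actual llama.cpp API.

```cpp
// Hypothetical sketch (not llama.cpp code): shows how a missing metadata read
// silently leaves the RMS-norm epsilon at its compiled-in default.
#include <cstdio>
#include <map>
#include <string>

struct hparams_t {
    float f_norm_rms_eps = 1e-5f; // default used when the key is never read
    int   n_layer        = 0;
};

// stand-in for reading a float key from model metadata
static bool get_key(const std::map<std::string, float> & meta,
                    const std::string & key, float & dst) {
    auto it = meta.find(key);
    if (it == meta.end()) {
        return false; // key absent or never requested: dst keeps its current value
    }
    dst = it->second;
    return true;
}

int main() {
    // pretend metadata written by the conversion script for a MiniCPM model
    std::map<std::string, float> meta = {
        { "minicpm.attention.layer_norm_rms_epsilon", 1e-6f },
    };

    hparams_t hp;
    printf("before read: f_norm_rms_eps = %g\n", hp.f_norm_rms_eps); // default value
    get_key(meta, "minicpm.attention.layer_norm_rms_epsilon", hp.f_norm_rms_eps);
    printf("after read : f_norm_rms_eps = %g\n", hp.f_norm_rms_eps); // value from metadata
    return 0;
}
```

Running the sketch prints the default 1e-05 before the read and 1e-06 after it, mirroring how skipping the read would leave MiniCPM with the wrong epsilon even though the converter wrote the correct value into the model file.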