constants.py : add layer norm eps
commit 0790c121aa
parent 87c34e4dd4
1 changed file with 2 additions and 0 deletions
@@ -27,6 +27,8 @@ KEY_ATTENTION_HEAD_COUNT = "{llm}.attention.head_count"
 KEY_ATTENTION_HEAD_COUNT_KV = "{llm}.attention.head_count_kv"
 KEY_ATTENTION_MAX_ALIBI_BIAS = "{llm}.attention.max_alibi_bias"
 KEY_ATTENTION_CLAMP_KQV = "{llm}.attention.clamp_kqv"
+KEY_ATTENTION_LAYERNORM_EPS = "{llm}.attention.layer_norm_epsilon"
+KEY_ATTENTION_LAYERNORM_RMS_EPS = "{llm}.attention.layer_norm_rms_epsilon"
 
 # RoPE
 KEY_ROPE_DIMENSION_COUNT = "{llm}.rope.dimension_count"
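
These key constants are Python format strings: the {llm} placeholder stands for the model architecture name and is substituted before the key is used as a metadata field name. A minimal sketch of that substitution, assuming a hypothetical architecture name "llama" (the arch value is illustrative, not part of this commit):

# Sketch (not from this commit): resolving one of the added
# format-string keys to a concrete metadata field name.
KEY_ATTENTION_LAYERNORM_RMS_EPS = "{llm}.attention.layer_norm_rms_epsilon"

arch = "llama"  # hypothetical architecture name for illustration
key = KEY_ATTENTION_LAYERNORM_RMS_EPS.format(llm=arch)
assert key == "llama.attention.layer_norm_rms_epsilon"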