fix: Remove unused LLM_KV_ATTENTION_LAYER_COUNT
I'd added this at one point, but it's not actually needed. Branch: BambaArchitecture. Signed-off-by: Gabe Goodhart <ghart@us.ibm.com>
This commit is contained in:
parent
97e6ba8d99
commit
b83e9a6cd2
1 changed files with 0 additions and 1 deletions
|
@@ -310,7 +310,6 @@ enum llm_kv {
     LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT,
     LLM_KV_ATTENTION_SLIDING_WINDOW,
     LLM_KV_ATTENTION_SCALE,
-    LLM_KV_ATTENTION_LAYER_COUNT,
     LLM_KV_ATTENTION_LAYER_INDICES,

     LLM_KV_ROPE_DIMENSION_COUNT,
|
Loading…
Add table
Add a link
Reference in a new issue