llama : fix grok rope type

This commit is contained in:
Georgi Gerganov 2024-03-22 22:18:47 +02:00 committed by GitHub
parent 81ce9df3ee
commit abdc8ea34a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -13680,7 +13680,6 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) {
// use what we call a normal RoPE, operating on pairs of consecutive head values
case LLM_ARCH_LLAMA:
case LLM_ARCH_GROK:
case LLM_ARCH_BAICHUAN:
case LLM_ARCH_STARCODER:
case LLM_ARCH_PLAMO:
@@ -13693,6 +13692,7 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) {
// the pairs of head values are offset by n_rot/2
case LLM_ARCH_FALCON:
case LLM_ARCH_GROK:
case LLM_ARCH_PERSIMMON:
case LLM_ARCH_BERT:
case LLM_ARCH_NOMIC_BERT: