From 80b381b940f772084cdc62284a6d6ba01dcecdba Mon Sep 17 00:00:00 2001
From: toyer <2042519524@qq.com>
Date: Wed, 3 Jul 2024 02:55:47 +0000
Subject: [PATCH] fix conflicts

---
 include/llama.h | 3 +--
 src/llama.cpp   | 1 +
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/include/llama.h b/include/llama.h
index a5fb088ea..16095d9a7 100644
--- a/include/llama.h
+++ b/include/llama.h
@@ -91,8 +91,7 @@ extern "C" {
         LLAMA_VOCAB_PRE_TYPE_CHATGLM3 = 16,
         LLAMA_VOCAB_PRE_TYPE_CHATGLM4 = 17,
         LLAMA_VOCAB_PRE_TYPE_VIKING   = 18,
-        LLAMA_VOCAB_PRE_TYPE_VIKING   = 19,
-        LLAMA_VOCAB_PRE_TYPE_JAIS     = 20,
+        LLAMA_VOCAB_PRE_TYPE_JAIS     = 19,
     };
 
     // note: these values should be synchronized with ggml_rope
diff --git a/src/llama.cpp b/src/llama.cpp
index b3b95f72c..cf0add3ab 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -12567,6 +12567,7 @@ struct llm_build_context {
             cur = llm_build_norm(ctx0, ffn_inp, hparams,
                     model.layers[il].ffn_norm,
                     model.layers[il].ffn_norm_b,
+                    LLM_NORM, cb, il);
             cb(cur, "ffn_norm", il);
 
             cur = llm_build_ffn(ctx0, cur,