From 0b4e849a24e07a6ac4e0936170b31dee33f4e0e2 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Fri, 24 Mar 2023 22:15:06 +0200
Subject: [PATCH] Fix number of layers in 30B and 65B

---
 llama.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 4caf607b7..071b54e47 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -371,11 +371,11 @@ static bool llama_model_load(
         model.type = e_model::MODEL_13B;
     }
 
-    if (hparams.n_layer == 52) {
+    if (hparams.n_layer == 60) {
         model.type = e_model::MODEL_30B;
     }
 
-    if (hparams.n_layer == 64) {
+    if (hparams.n_layer == 80) {
         model.type = e_model::MODEL_65B;
     }
 
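For context, below is a minimal, self-contained sketch of the layer-count-to-model-size
mapping this patch corrects. The standalone function name and the MODEL_7B /
MODEL_UNKNOWN cases are illustrative assumptions, not taken from the patch; in
llama.cpp the checks live inline in llama_model_load. The corrected values (60 and 80)
come from the patch itself, and 32/40/60/80 match the published LLaMA 7B/13B/30B/65B
configurations.

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-in for the e_model enum in llama.cpp; MODEL_UNKNOWN
    // and MODEL_7B are assumptions added here for completeness.
    enum class e_model { MODEL_UNKNOWN, MODEL_7B, MODEL_13B, MODEL_30B, MODEL_65B };

    // Hypothetical helper: identify a LLaMA checkpoint by its transformer
    // layer count, as the patched code does with a chain of if-statements.
    static e_model model_type_from_n_layer(int32_t n_layer) {
        switch (n_layer) {
            case 32: return e_model::MODEL_7B;
            case 40: return e_model::MODEL_13B;
            case 60: return e_model::MODEL_30B;   // was mis-detected at 52 before the patch
            case 80: return e_model::MODEL_65B;   // was mis-detected at 64 before the patch
            default: return e_model::MODEL_UNKNOWN;
        }
    }

    int main() {
        // With the pre-patch values, a real 65B checkpoint (80 layers)
        // would not have matched any case and fallen through undetected.
        printf("n_layer=80 detected as MODEL_65B: %d\n",
               model_type_from_n_layer(80) == e_model::MODEL_65B);
        return 0;
    }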