Fix number of layers in 30B and 65B

Georgi Gerganov 2023-03-24 22:15:06 +02:00
parent 3634c312bc
commit 0b4e849a24

@@ -371,11 +371,11 @@ static bool llama_model_load(
            model.type = e_model::MODEL_13B;
        }
-       if (hparams.n_layer == 52) {
+       if (hparams.n_layer == 60) {
            model.type = e_model::MODEL_30B;
        }
-       if (hparams.n_layer == 64) {
+       if (hparams.n_layer == 80) {
            model.type = e_model::MODEL_65B;
        }
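
For reference, a minimal sketch of the n_layer -> model size mapping after this fix. The 7B branch and its layer count of 32 are not shown in this hunk and are an assumption based on the published LLaMA configurations (7B = 32 layers, 13B = 40, 30B = 60, 65B = 80); the exact surrounding code in llama.cpp may differ.

    // Sketch: select the model size from the layer count in the hparams.
    // 7B = 32 layers is assumed from the LLaMA paper, not from this diff.
    if (hparams.n_layer == 32) { model.type = e_model::MODEL_7B;  }
    if (hparams.n_layer == 40) { model.type = e_model::MODEL_13B; }
    if (hparams.n_layer == 60) { model.type = e_model::MODEL_30B; }
    if (hparams.n_layer == 80) { model.type = e_model::MODEL_65B; }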