Fix number of layers in 30B and 65B
commit 0b4e849a24
parent 3634c312bc
1 changed file with 2 additions and 2 deletions
@@ -371,11 +371,11 @@ static bool llama_model_load(
         model.type = e_model::MODEL_13B;
     }
 
-    if (hparams.n_layer == 52) {
+    if (hparams.n_layer == 60) {
         model.type = e_model::MODEL_30B;
     }
 
-    if (hparams.n_layer == 64) {
+    if (hparams.n_layer == 80) {
         model.type = e_model::MODEL_65B;
     }
 
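For context, the code around this hunk infers the LLaMA variant from the transformer layer count (hparams.n_layer) read from the model file; the commit corrects the expected counts for the 30B (60 layers, not 52) and 65B (80 layers, not 64) models. Below is a minimal standalone sketch of that mapping. The helper name and the exact enum layout are illustrative, not the precise llama.cpp definitions; the 7B=32 and 13B=40 layer counts are the standard LLaMA values, not part of this diff.

#include <cstdint>
#include <cstdio>

// Illustrative model-size enum; llama.cpp defines a similar e_model type.
enum class e_model { MODEL_UNKNOWN, MODEL_7B, MODEL_13B, MODEL_30B, MODEL_65B };

// Map a transformer layer count to a LLaMA variant, using the
// corrected values from this commit: 7B=32, 13B=40, 30B=60, 65B=80.
static e_model model_type_from_n_layer(uint32_t n_layer) {
    switch (n_layer) {
        case 32: return e_model::MODEL_7B;
        case 40: return e_model::MODEL_13B;
        case 60: return e_model::MODEL_30B;  // was wrongly checked as 52 before this fix
        case 80: return e_model::MODEL_65B;  // was wrongly checked as 64 before this fix
        default: return e_model::MODEL_UNKNOWN;
    }
}

int main() {
    // With the old values, a 60-layer (30B) model fell through undetected.
    printf("n_layer=60 detected as 30B? %s\n",
           model_type_from_n_layer(60) == e_model::MODEL_30B ? "yes" : "no");
    return 0;
}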