convert.py : fix llama/llama2 conversion due to vocab_size=-1

This commit is contained in:
slaren 2023-11-29 19:28:07 +01:00
parent 1f5cd83275
commit f3ed3c00f5

View file

@@ -267,7 +267,7 @@ class Params:
n_ctx = 2048
return Params(
-            n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]),
+            n_vocab = model["tok_embeddings.weight"].shape[0],
n_embd = config["dim"],
n_layer = config["n_layers"],
n_ctx = n_ctx,