Skip to content

Commit

Permalink
convert.py : fix llama/llama2 conversion due to vocab_size=-1 (#4258)
Browse files · Browse the repository at this point in the history
  • Loading branch information
slaren authored Nov 30, 2023
1 parent 954e228 commit f4d973c
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion convert.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -267,7 +267,7 @@ def loadOriginalParamsJson(model: LazyModel, config_path: Path) -> Params:
n_ctx = 2048

return Params(
n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]),
n_vocab = model["tok_embeddings.weight"].shape[0],
n_embd = config["dim"],
n_layer = config["n_layers"],
n_ctx = n_ctx,
Expand Down

0 comments on commit f4d973c

Please sign in to comment.