Commit 23f6a43

fix (#32162)
1 parent d5a99df commit 23f6a43

File tree

1 file changed (+1 line added, -1 line removed)

src/transformers/models/llama/modeling_llama.py

Lines changed: 1 addition & 1 deletion
@@ -107,7 +107,7 @@ def __init__(
         else:
             # BC: "rope_type" was originally "type"
             if config.rope_scaling is not None:
-                self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling["type"])
+                self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
             else:
                 self.rope_type = "default"
             self.max_seq_len_cached = config.max_position_embeddings
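
Why the one-line change matters: Python evaluates the default argument of dict.get eagerly, so in the old line config.rope_scaling["type"] was executed on every call and raised a KeyError whenever the legacy "type" key was absent, even if "rope_type" was present. Using .get("type") as the fallback returns None instead of raising. A minimal sketch of the behavior, using a hypothetical rope_scaling dict (only the lookup pattern comes from the diff):

    # Hypothetical newer-style config: has "rope_type" but no legacy "type" key.
    rope_scaling = {"rope_type": "linear", "factor": 2.0}

    # Before the fix: the default argument is evaluated eagerly, so the
    # indexed lookup raises KeyError even though "rope_type" is present.
    try:
        rope_type = rope_scaling.get("rope_type", rope_scaling["type"])
    except KeyError as err:
        print(f"old code fails: missing key {err}")  # -> missing key 'type'

    # After the fix: .get("type") returns None instead of raising, so
    # "rope_type" wins when present, and the result is None only when
    # both keys are absent.
    rope_type = rope_scaling.get("rope_type", rope_scaling.get("type"))
    print(rope_type)  # -> linear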
