commit 23f6a43 · 1 parent d5a99df
src/transformers/models/llama/modeling_llama.py
@@ -107,7 +107,7 @@ def __init__(
         else:
             # BC: "rope_type" was originally "type"
             if config.rope_scaling is not None:
-                self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling["type"])
+                self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
             else:
                 self.rope_type = "default"
             self.max_seq_len_cached = config.max_position_embeddings
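
Why this one-call change matters (an illustrative note, not part of the commit): in Python, `dict.get(key, default)` evaluates the `default` expression eagerly, so the old line computed `config.rope_scaling["type"]` even when `"rope_type"` was present, raising `KeyError` for configs that only carry the new key. Switching the fallback to `.get("type")` makes the legacy lookup safe. A minimal reproduction, with a plain dict standing in for `config.rope_scaling` and hypothetical values:

# Minimal sketch of the bug, with a plain dict standing in for
# config.rope_scaling; the values are hypothetical.
rope_scaling = {"rope_type": "linear", "factor": 2.0}  # no legacy "type" key

# Old line: dict.get evaluates its default argument eagerly, so the
# subscript raises KeyError('type') even though "rope_type" exists.
try:
    rope_type = rope_scaling.get("rope_type", rope_scaling["type"])
except KeyError as err:
    print("old line raises KeyError:", err)  # -> old line raises KeyError: 'type'

# Fixed line: .get("type") returns None instead of raising, and the
# fallback value is never used here because "rope_type" is present.
rope_type = rope_scaling.get("rope_type", rope_scaling.get("type"))
print(rope_type)  # -> linear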