Commit
proper update
ArthurZucker committed Sep 22, 2024
1 parent 80dcc1d commit 73590ad
Showing 2 changed files with 7 additions and 7 deletions.
12 changes: 6 additions & 6 deletions examples/diff-conversion/configuration_my_new_model.py
@@ -4,6 +4,7 @@
 # the file from the diff. If any change should be done, please apply the change to the
 # diff.py file directly. One of our CI enforces this
 # 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
+
 from ...configuration_utils import PretrainedConfig
 from ...modeling_rope_utils import rope_config_validation
 
@@ -110,10 +111,9 @@ class MyNewModelConfig(PretrainedConfig):
             Whether to use a bias in up_proj, down_proj and gate_proj layers in the MLP layers.
         head_dim (`int`, *optional*):
             The attention head dimension. If None, it will default to hidden_size // num_heads
-        new_param (`int`, *optional*, defaults to `False`):
-            A fun new parameter
-    r
+        new_param (`int`, *optional*, defaults to `False`):
+            A fun new parameter
 
     ```python
     >>> from transformers import MyNewModelModel, MyNewModelConfig
@@ -157,8 +157,6 @@ def __init__(
         new_param=0,
         **kwargs,
     ):
-        self.mlp_bias = mlp_bias
-        self.new_param = new_param
         self.vocab_size = vocab_size
         self.max_position_embeddings = max_position_embeddings
         self.hidden_size = hidden_size
@@ -182,7 +180,7 @@ def __init__(
         self.attention_dropout = attention_dropout
         self.head_dim = head_dim if head_dim is not None else self.hidden_size // self.num_attention_heads
         # Validate the correctness of rotary position embeddings parameters
-        # BC: if there is a 'type' field, move it to 'rope_type'.
+        # BC: if there is a 'type' field, copy it to 'rope_type'.
         if self.rope_scaling is not None and "type" in self.rope_scaling:
             self.rope_scaling["rope_type"] = self.rope_scaling["type"]
         rope_config_validation(self)
@@ -194,3 +192,5 @@ def __init__(
             tie_word_embeddings=tie_word_embeddings,
             **kwargs,
         )
+        self.mlp_bias = mlp_bias
+        self.new_param = new_param
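
The backward-compatibility shim touched in the hunk above simply copies a legacy "type" key into the newer "rope_type" key before rope_config_validation runs. A minimal, standalone sketch of that behaviour (the legacy_rope_scaling dict below is a made-up example, not something taken from this commit):

# Hypothetical legacy rope_scaling dict that still uses the old "type" key.
legacy_rope_scaling = {"type": "linear", "factor": 2.0}

# Same check-and-copy logic as in the generated config above.
if legacy_rope_scaling is not None and "type" in legacy_rope_scaling:
    legacy_rope_scaling["rope_type"] = legacy_rope_scaling["type"]

print(legacy_rope_scaling)
# {'type': 'linear', 'factor': 2.0, 'rope_type': 'linear'}
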
2 changes: 1 addition & 1 deletion examples/diff-conversion/modular_my_new_model.py
@@ -10,6 +10,6 @@ class MyNewModelConfig(LlamaConfig):
     """
 
     def __init__(self, mlp_bias=True, new_param=0, **super_kwargs):
+        super().__init__(self, **super_kwargs)
         self.mlp_bias = mlp_bias
         self.new_param = new_param
-        super().__init__(self, **super_kwargs)
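
The modular file is the hand-written source here; configuration_my_new_model.py above is regenerated from it, with the super().__init__ call apparently expanded into the parent's body at the call site, which is why the generated file now assigns self.mlp_bias and self.new_param after its own super().__init__. A rough, runnable sketch of the post-commit pattern, using BaseConfig as a stand-in for LlamaConfig/PretrainedConfig (an assumption, not the real API) and dropping the stray self argument the example passes to super().__init__:

class BaseConfig:
    # Stand-in parent so the sketch runs without transformers installed.
    def __init__(self, vocab_size=32000, hidden_size=4096, **kwargs):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size


class MyNewModelConfig(BaseConfig):
    def __init__(self, mlp_bias=True, new_param=0, **super_kwargs):
        # Parent init runs first, then the model-specific attributes are set,
        # mirroring the order the generated configuration file ends up with.
        super().__init__(**super_kwargs)
        self.mlp_bias = mlp_bias
        self.new_param = new_param


cfg = MyNewModelConfig(new_param=3, vocab_size=1000)
print(cfg.new_param, cfg.mlp_bias, cfg.vocab_size)  # 3 True 1000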
