Skip to content

Commit

Permalink
add option for resizing embeddings when adding new tokens (#2000)
Browse files Browse the repository at this point in the history
* add option for resizing embeddings when adding new tokens

* let's just be opinionated about this setting and set it to False
  • Loading branch information
winglian authored Oct 28, 2024
1 parent d3c45d2 commit e1e0556
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 1 deletion.
1 change: 1 addition & 0 deletions src/axolotl/utils/config/models/input/v0_4_1/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -549,6 +549,7 @@ class Config:
resume_from_checkpoint: Optional[str] = None
auto_resume_from_checkpoints: Optional[bool] = None
resize_token_embeddings_to_32x: Optional[bool] = None
mean_resizing_embeddings: Optional[bool] = False

rl: Optional[RLType] = None
reward_model: Optional[bool] = None
Expand Down
5 changes: 4 additions & 1 deletion src/axolotl/utils/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1042,7 +1042,10 @@ def load_model(self) -> Tuple[PreTrainedModel, Optional[PeftConfig]]:
hasattr(self.model, "get_input_embeddings")
and self.model.get_input_embeddings().num_embeddings < embeddings_len
):
self.model.resize_token_embeddings(embeddings_len)
resize_kwargs = {}
if self.cfg.mean_resizing_embeddings is not None:
resize_kwargs["mean_resizing"] = self.cfg.mean_resizing_embeddings
self.model.resize_token_embeddings(embeddings_len, **resize_kwargs)
else:
self.model.tie_weights()

Expand Down

0 comments on commit e1e0556

Please sign in to comment.