Skip to content

Commit

Permalink
Fix typing annotations for FSDP and DeepSpeed in TrainingArguments (huggingface#24549)
Browse files Browse the repository at this point in the history

* Fix typing annotations for FSDP and DeepSpeed in TrainingArguments

* Change dict to Dict
  • Loading branch information
mryab authored Jun 28, 2023
1 parent daccde1 commit c5e29d4
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions src/transformers/training_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -976,12 +976,12 @@ class TrainingArguments:
)
},
)
fsdp_config: Optional[str] = field(
fsdp_config: Optional[Union[str, Dict]] = field(
default=None,
metadata={
"help": (
"Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
"fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
"Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
"fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
)
},
)
Expand All @@ -994,11 +994,11 @@ class TrainingArguments:
)
},
)
deepspeed: Optional[str] = field(
deepspeed: Optional[Union[str, Dict]] = field(
default=None,
metadata={
"help": (
"Enable deepspeed and pass the path to deepspeed json config file (e.g. ds_config.json) or an already"
"Enable deepspeed and pass the path to deepspeed json config file (e.g. `ds_config.json`) or an already"
" loaded json file as a dict"
)
},
Expand Down

0 comments on commit c5e29d4

Please sign in to comment.