diff --git a/src/transformers/training_args.py b/src/transformers/training_args.py
index 638875bb536dc3..ec8f575b6c3ea3 100644
--- a/src/transformers/training_args.py
+++ b/src/transformers/training_args.py
@@ -611,8 +611,9 @@ class TrainingArguments:
             The options should be separated by whitespaces.
         optim (`str` or [`training_args.OptimizerNames`], *optional*, defaults to `"adamw_torch"`):
-            The optimizer to use: adamw_hf, adamw_torch, adamw_torch_fused, adamw_apex_fused, adamw_anyprecision or
-            adafactor.
+            The optimizer to use, such as "adamw_hf", "adamw_torch", "adamw_torch_fused", "adamw_apex_fused", "adamw_anyprecision",
+            "adafactor". See `OptimizerNames` in [training_args.py](https://github.com/huggingface/transformers/blob/main/src/transformers/training_args.py)
+            for a full list of optimizers.
         optim_args (`str`, *optional*):
             Optional arguments that are supplied to AnyPrecisionAdamW.
         group_by_length (`bool`, *optional*, defaults to `False`):
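
For reviewers, a minimal sketch (not part of this change) of how the documented `optim` string relates to `OptimizerNames`; the `output_dir` value and choice of optimizer name are placeholders:

```python
# Hypothetical usage sketch: list the accepted optimizer names and pass one to
# TrainingArguments via `optim`. Everything except `optim` is placeholder setup.
from transformers import TrainingArguments
from transformers.training_args import OptimizerNames

# Every value accepted by `optim` corresponds to an OptimizerNames member.
print([name.value for name in OptimizerNames])

args = TrainingArguments(
    output_dir="out",      # placeholder
    optim="adafactor",     # e.g. "adamw_torch", "adamw_torch_fused", ...
)
print(args.optim)  # the string is resolved to an OptimizerNames enum member
```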