Commit 454505a
add master_grad
heavyrain-lzy committed Jan 9, 2024
1 parent d51edd0 commit 454505a
Showing 1 changed file with 1 addition and 3 deletions.
paddlenlp/trainer/training_args.py (4 changes: 1 addition & 3 deletions)
@@ -1174,9 +1174,6 @@ def is_segment_parallel_supported():
             pipeline.micro_batch_size = self.per_device_train_batch_size
             pipeline.schedule_mode = self.pipeline_schedule_mode

-            if self.amp_master_grad:
-                warnings.warn("`amp_master_grad` is not supported NOW in AutoParallel!")
-                self.amp_master_grad = False
             logger.info(f"PP configs:{strategy.pipeline}, use master_grad: {self.amp_master_grad}")

             if self.do_eval:
@@ -1260,6 +1257,7 @@ def is_segment_parallel_supported():
             amp.enable = True
             amp.dtype = "bfloat16" if self.bf16 else "float16"
             amp.level = self.fp16_opt_level.lower()
+            amp.use_master_grad = self.amp_master_grad
             amp.init_loss_scaling = self.scale_loss
             amp.custom_black_list = self.amp_custom_black_list if self.amp_custom_black_list is not None else []
             amp.custom_white_list = self.amp_custom_white_list if self.amp_custom_white_list is not None else []
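In effect, the commit stops force-disabling amp_master_grad under auto parallel and instead forwards it into the AMP section of the distributed strategy. Below is a minimal, self-contained sketch of the resulting configuration, assuming paddle.distributed.fleet.auto.Strategy as the strategy container; the build_amp_strategy helper and the args object are hypothetical stand-ins for the TrainingArguments plumbing, and the real method interleaves this with much more setup.

    # Minimal sketch (not the actual PaddleNLP method): how the AMP block of
    # the auto-parallel strategy is filled in after this commit.
    # `build_amp_strategy` and `args` are hypothetical stand-ins.
    from paddle.distributed.fleet import auto

    def build_amp_strategy(args):
        strategy = auto.Strategy()
        amp = strategy.amp
        amp.enable = True
        # bf16 takes precedence over fp16 when both flags are set.
        amp.dtype = "bfloat16" if args.bf16 else "float16"
        amp.level = args.fp16_opt_level.lower()  # e.g. "o1" or "o2"
        # New in 454505a: keep FP32 "master" gradients so the optimizer
        # update runs in full precision rather than the compute dtype.
        amp.use_master_grad = args.amp_master_grad
        amp.init_loss_scaling = args.scale_loss
        amp.custom_black_list = args.amp_custom_black_list or []
        amp.custom_white_list = args.amp_custom_white_list or []
        return strategy

Master gradients trade a small amount of extra memory for numerical stability: gradients are accumulated and applied in float32 even when the forward and backward passes run in float16 or bfloat16, which is why the flag no longer needs to be disabled with a warning once auto parallel supports it.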
