
Commit 686b9df

Disable loss rounding in training stats log
1 parent a127710

File tree

1 file changed (+1, -1)


src/transformers/trainer.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -2965,7 +2965,7 @@ def _maybe_log_save_evaluate(
             # reset tr_loss to zero
             tr_loss -= tr_loss
 
-            logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
+            logs["loss"] = tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged)
             if grad_norm is not None:
                 logs["grad_norm"] = grad_norm.item() if isinstance(grad_norm, torch.Tensor) else grad_norm
             if learning_rate is not None:
```
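
For readers skimming the diff: `logs["loss"]` is the loss accumulated since the last log, averaged over the steps elapsed (`self.state.global_step - self._globalstep_last_logged`). Previously that average was rounded to 4 decimal places before logging; after this commit the full-precision float is logged. A minimal standalone sketch of one effect of the change, using invented loss values (not real Trainer output):

```python
# Invented averaged-loss values at three consecutive logging steps,
# chosen so they differ only from the 5th decimal place onward.
losses = [0.086431, 0.086398, 0.086357]

old_logs = [round(x, 4) for x in losses]  # old behavior: round to 4 decimals
new_logs = losses                         # new behavior: keep full precision

print(old_logs)  # [0.0864, 0.0864, 0.0864] -> downward trend is invisible
print(new_logs)  # [0.086431, 0.086398, 0.086357] -> trend preserved
```

Late in training, when per-log improvements fall below 5e-5, rounding makes the logged loss appear flat; logging the raw quotient avoids that without changing what is computed.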
