Commit 476be08 (1 parent: a982c02)
src/transformers/training_args.py
@@ -1720,7 +1720,7 @@ def _setup_devices(self) -> "torch.device":
         self.distributed_state = None
         if not self.use_ipex and "ACCELERATE_USE_IPEX" not in os.environ:
             os.environ["ACCELERATE_USE_IPEX"] = "false"
-        if self.use_cpu:
+        if self.use_cpu or os.environ.get("ACCELERATE_USE_CPU", False):
             self.distributed_state = PartialState(cpu=True, backend=self.ddp_backend)
             self._n_gpu = 0
         elif is_sagemaker_mp_enabled():
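
The change above makes device setup fall into the CPU branch not only when the use_cpu training argument is set, but also when the ACCELERATE_USE_CPU environment variable is present. A minimal sketch of how that condition behaves, using a hypothetical should_force_cpu helper rather than the actual TrainingArguments code:

import os

def should_force_cpu(use_cpu: bool) -> bool:
    # Mirrors the condition in the diff above: os.environ.get returns the
    # variable's string value when it is set, so any non-empty string
    # (even "false") is truthy and selects the CPU branch.
    return bool(use_cpu or os.environ.get("ACCELERATE_USE_CPU", False))

# Variable unset: only the explicit use_cpu flag matters.
os.environ.pop("ACCELERATE_USE_CPU", None)
assert should_force_cpu(use_cpu=False) is False

# With the variable exported (e.g. by a launcher configured to run on CPU),
# device setup would take the CPU branch even though use_cpu is False.
os.environ["ACCELERATE_USE_CPU"] = "true"
assert should_force_cpu(use_cpu=False) is True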