Skip to content

Commit e65e511

Browse files
authored (author name omitted in page extraction)
turn excessive noise off (#1293)
1 parent cce85b8 commit e65e511

File tree

3 files changed

+8
-8
lines changed

3 files changed

+8
-8
lines changed

deepspeed/runtime/engine.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2051,7 +2051,7 @@ def _copy_recovery_script(self, save_path):
20512051
script = "zero_to_fp32.py"
20522052
src = os.path.join(base_dir, "utils", script)
20532053
dst = os.path.join(save_path, script)
2054-
logger.info(f"creating recovery script {dst}")
2054+
#logger.info(f"creating recovery script {dst}")
20552055
copyfile(src, dst)
20562056
# make executable
20572057
os.chmod(dst, os.stat(dst).st_mode | stat.S_IEXEC)
@@ -2064,7 +2064,7 @@ def _save_zero_checkpoint(self, save_path, tag):
20642064
ds_version=version)
20652065
torch.save(zero_sd, zero_checkpoint_name)
20662066
self._copy_recovery_script(save_path)
2067-
logger.info('zero checkpoint saved {}'.format(zero_checkpoint_name))
2067+
#logger.info('zero checkpoint saved {}'.format(zero_checkpoint_name))
20682068

20692069
def _zero3_consolidated_fp16_state_dict(self):
20702070
"""

deepspeed/runtime/pipe/module.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -577,10 +577,10 @@ def load_state_dir(self, load_dir, strict=True):
577577

578578
layer.load_state_dict(checkpoint)
579579

580-
if self._grid.data_parallel_id == 0:
581-
logger.info(
582-
f'RANK={self.global_rank} Loaded layer={idx+self._local_start} file={load_path}'
583-
)
580+
# if self._grid.data_parallel_id == 0:
581+
# logger.info(
582+
# f'RANK={self.global_rank} Loaded layer={idx+self._local_start} file={load_path}'
583+
# )
584584

585585
self._synchronize_tied_weights()
586586

deepspeed/runtime/state_dict_factory.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ def load(self,
7878
merge_count = 1
7979
if num_ckpt == mp_world_size:
8080
assert os.path.exists(load_path)
81-
logger.info(f'rank: {mp_rank} loading checkpoint: {load_path}')
81+
#logger.info(f'rank: {mp_rank} loading checkpoint: {load_path}')
8282
sd = torch.load(load_path, map_location=lambda storage, loc: storage)
8383

8484
if quantize:
@@ -158,7 +158,7 @@ def set_module(self, sd, module):
158158
return sd
159159

160160
def check_ckpt_list(self):
161-
logger.info(f'checkpoint file list: {self.ckpt_list}')
161+
#logger.info(f'checkpoint file list: {self.ckpt_list}')
162162
assert len(self.ckpt_list) > 0
163163

164164
sd = torch.load(self.ckpt_list[0], map_location=lambda storage, loc: storage)

0 commit comments

Comments (0)