Skip to content

Commit 316e530

Browse files
authored
[DLMED] fix log print (#212)
Signed-off-by: Nic Ma <nma@nvidia.com>
1 parent 1f5d7b4 commit 316e530

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

acceleration/distributed_training/unet_evaluation_workflows.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@
8282

8383

8484
def evaluate(args):
85+
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
8586
if args.local_rank == 0 and not os.path.exists(args.dir):
8687
    # create 16 random image, mask pairs for evaluation
# create 16 random image, mask paris for evaluation
8788
print(f"generating synthetic data to {args.dir} (this may take a while)")
@@ -149,7 +150,6 @@ def evaluate(args):
149150
),
150151
]
151152
if dist.get_rank() == 0:
152-
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
153153
val_handlers.extend(
154154
[
155155
StatsHandler(output_transform=lambda x: None),

acceleration/distributed_training/unet_training_workflows.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,7 @@
8484

8585

8686
def train(args):
87+
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
8788
if args.local_rank == 0 and not os.path.exists(args.dir):
8889
# create 40 random image, mask paris for training
8990
print(f"generating synthetic data to {args.dir} (this may take a while)")
@@ -160,7 +161,6 @@ def train(args):
160161
LrScheduleHandler(lr_scheduler=lr_scheduler, print_lr=True),
161162
]
162163
if dist.get_rank() == 0:
163-
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
164164
train_handlers.extend(
165165
[
166166
StatsHandler(tag_name="train_loss", output_transform=lambda x: x["loss"]),

0 commit comments

Comments (0)