Commit 96433ce

Updated versions for black, usort and ufmt and reformatted codebase (#2930)
Parent commit: 9d38754

File tree: 149 files changed, +40 / -821 lines

.github/workflows/trigger_circle_ci.py

Lines changed: 0 additions & 3 deletions
@@ -42,7 +42,6 @@ def assert_pipeline_created(pipeline_id, headers):


 def get_workflow_id(pipeline_id, headers):
-
     while True:
         result = requests.get(f"https://circleci.com/api/v2/pipeline/{pipeline_id}/workflow", headers=headers)
         assert_result(result, 200)
@@ -59,7 +58,6 @@ def get_workflow_id(pipeline_id, headers):


 def assert_workflows_successful(pipeline_id, headers):
-
     workflow_id = get_workflow_id(pipeline_id, headers)

     base_url = "https://app.circleci.com/pipelines/github/pytorch/ignite"
@@ -84,7 +82,6 @@ def assert_workflows_successful(pipeline_id, headers):


 if __name__ == "__main__":
-
     print("Trigger new pipeline on Circle-CI")

     if "CIRCLE_TOKEN" not in os.environ:

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
@@ -15,12 +15,12 @@ repos:
         exclude_types: ["python", "jupyter", "shell", "gitignore"]

   - repo: https://github.com/omnilib/ufmt
-    rev: v1.3.1
+    rev: v2.1.0
     hooks:
       - id: ufmt
         additional_dependencies:
-          - black == 21.12b0
-          - usort == 1.0.1
+          - black == 23.3.0
+          - usort == 1.0.6

   - repo: https://github.com/pycqa/flake8
     rev: 6.0.0
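
The rest of this commit is mechanical reformatting: nearly every hunk deletes a single blank line that sat directly after a `def ...:` (or `if __name__ == "__main__":`) line. That appears to be the behaviour of the updated toolchain pinned above (ufmt 2.1.0 running black 23.3.0 and usort 1.0.6). A minimal before/after sketch of the effect, using the get_workflow_id helper from the first hunk above; the elided bodies are placeholders:

# Layout accepted by the previously pinned black 21.12b0: a blank line was
# tolerated directly after the function signature.
def get_workflow_id(pipeline_id, headers):

    while True:
        ...


# Layout produced by black 23.3.0 (run through ufmt 2.1.0): the blank line at
# the top of the function body is removed; this is the only change in most
# hunks of this commit.
def get_workflow_id(pipeline_id, headers):
    while True:
        ...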

docker/test_image.py

Lines changed: 0 additions & 2 deletions
@@ -21,7 +21,6 @@


 def run_python_cmd(cmd):
-
     try_except_cmd = f"""
 import warnings
 warnings.filterwarnings("ignore")
@@ -65,7 +64,6 @@ def main():


 if __name__ == "__main__":
-
     parser = argparse.ArgumentParser("Check docker image script")
     parser.add_argument("image", type=str, help="Docker image to check")
     args = parser.parse_args()

examples/contrib/cifar10/main.py

Lines changed: 0 additions & 4 deletions
@@ -20,7 +20,6 @@


 def training(local_rank, config):
-
     rank = idist.get_rank()
     manual_seed(config["seed"] + rank)
     device = idist.device()
@@ -205,7 +204,6 @@ def run(
         raise RuntimeError("The value of with_amp should be False if backend is xla")

     with idist.Parallel(backend=backend, **spawn_kwargs) as parallel:
-
         parallel.run(training, config)


@@ -283,7 +281,6 @@ def log_basic_info(logger, config):


 def create_trainer(model, optimizer, criterion, lr_scheduler, train_sampler, config, logger):
-
     device = idist.device()

     # Setup Ignite trainer:
@@ -299,7 +296,6 @@ def create_trainer(model, optimizer, criterion, lr_scheduler, train_sampler, con
     scaler = GradScaler(enabled=with_amp)

     def train_step(engine, batch):
-
         x, y = batch[0], batch[1]

         if x.device != device:

examples/contrib/cifar10_qat/main.py

Lines changed: 0 additions & 4 deletions
@@ -19,7 +19,6 @@


 def training(local_rank, config):
-
     rank = idist.get_rank()
     manual_seed(config["seed"] + rank)
     device = idist.device()
@@ -189,7 +188,6 @@ def run(
     spawn_kwargs["nproc_per_node"] = nproc_per_node

     with idist.Parallel(backend=backend, **spawn_kwargs) as parallel:
-
         parallel.run(training, config)


@@ -267,7 +265,6 @@ def log_basic_info(logger, config):


 def create_trainer(model, optimizer, criterion, lr_scheduler, train_sampler, config, logger):
-
     device = idist.device()

     # Setup Ignite trainer:
@@ -283,7 +280,6 @@ def create_trainer(model, optimizer, criterion, lr_scheduler, train_sampler, con
     scaler = GradScaler(enabled=with_amp)

     def train_step(engine, batch):
-
         x, y = batch[0], batch[1]

         if x.device != device:

examples/contrib/transformers/main.py

Lines changed: 0 additions & 5 deletions
@@ -22,7 +22,6 @@


 def training(local_rank, config):
-
     rank = idist.get_rank()
     manual_seed(config["seed"] + rank)
     device = idist.device()
@@ -33,7 +32,6 @@ def training(local_rank, config):

     output_path = config["output_dir"]
     if rank == 0:
-
         now = datetime.now().strftime("%Y%m%d-%H%M%S")
         folder_name = f"{config['model']}_backend-{idist.backend()}-{idist.get_world_size()}_{now}"
         output_path = Path(output_path) / folder_name
@@ -207,7 +205,6 @@ def run(
     spawn_kwargs["nproc_per_node"] = nproc_per_node

     with idist.Parallel(backend=backend, **spawn_kwargs) as parallel:
-
         parallel.run(training, config)


@@ -293,7 +290,6 @@ def log_basic_info(logger, config):


 def create_trainer(model, optimizer, criterion, lr_scheduler, train_sampler, config, logger):
-
     device = idist.device()

     # Setup Ignite trainer:
@@ -309,7 +305,6 @@ def create_trainer(model, optimizer, criterion, lr_scheduler, train_sampler, con
     scaler = GradScaler(enabled=with_amp)

     def train_step(engine, batch):
-
         input_batch = batch[0]
         labels = batch[1].view(-1, 1)


examples/fast_neural_style/neural_style.py

Lines changed: 0 additions & 1 deletion
@@ -78,7 +78,6 @@ def train(args):
     running_avgs = OrderedDict()

     def step(engine, batch):
-
         x, _ = batch
         x = x.to(device)

examples/gan/dcgan.py

Lines changed: 0 additions & 2 deletions
@@ -207,7 +207,6 @@ def main(
     alpha,
     output_dir,
 ):
-
     # seed
     check_manual_seed(seed)

@@ -243,7 +242,6 @@ def get_noise():

     # The main function, processing a batch of examples
     def step(engine, batch):
-
         # unpack the batch. It comes from a dataset, so we have <images, labels> pairs. Discard labels.
         real, _ = batch
         real = real.to(device)

examples/references/classification/imagenet/dataflow.py

Lines changed: 0 additions & 2 deletions
@@ -19,7 +19,6 @@ def opencv_loader(path):


 def get_dataloader(dataset, sampler=None, shuffle=False, limit_num_samples=None, **kwargs):
-
     if limit_num_samples is not None:
         g = torch.Generator().manual_seed(limit_num_samples)
         indices = torch.randperm(len(dataset), generator=g)[:limit_num_samples]
@@ -38,7 +37,6 @@ def get_train_val_loaders(
     limit_train_num_samples: Optional[int] = None,
     limit_val_num_samples: Optional[int] = None,
 ) -> Tuple[DataLoader, DataLoader, DataLoader]:
-
     train_ds = ImageFolder(
         Path(root_path) / "train",
         transform=lambda sample: train_transforms(image=sample)["image"],

examples/references/classification/imagenet/main.py

Lines changed: 0 additions & 5 deletions
@@ -24,7 +24,6 @@


 def training(local_rank, config, logger, with_clearml):
-
     rank = idist.get_rank()
     manual_seed(config.seed + local_rank)

@@ -305,7 +304,6 @@ def run_training(config_filepath, backend="nccl", with_clearml=True):
     assert config_filepath.exists(), f"File '{config_filepath.as_posix()}' is not found"

     with idist.Parallel(backend=backend) as parallel:
-
         logger = setup_logger(name="ImageNet Training", distributed_rank=idist.get_rank())

         config = ConfigObject(config_filepath)
@@ -327,7 +325,6 @@ def run_training(config_filepath, backend="nccl", with_clearml=True):


 def get_model_weights(config, logger, with_clearml):
-
     path = ""
     if with_clearml:
         from clearml import Model
@@ -352,7 +349,6 @@ def get_model_weights(config, logger, with_clearml):


 def evaluation(local_rank, config, logger, with_clearml):
-
     rank = idist.get_rank()
     device = idist.device()
     manual_seed(config.seed + local_rank)
@@ -428,5 +424,4 @@ def run_evaluation(config_filepath, backend="nccl", with_clearml=True):


 if __name__ == "__main__":
-
     fire.Fire({"training": run_training, "eval": run_evaluation})
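
For readers unfamiliar with the pattern that keeps recurring in the example-script hunks above (idist.Parallel wrapping a training(local_rank, config) function): below is a minimal, self-contained sketch of that PyTorch-Ignite launcher idiom, condensed from the context lines visible in the diffs. The config value and the backend=None choice are placeholders for illustration, not taken from this commit.

import ignite.distributed as idist
from ignite.utils import manual_seed


def training(local_rank, config):
    # Each worker derives its seed from its rank and queries its own device,
    # mirroring the context lines shown in the hunks above.
    rank = idist.get_rank()
    manual_seed(config["seed"] + rank)
    device = idist.device()
    print(f"rank={rank}, local_rank={local_rank}, device={device}")


if __name__ == "__main__":
    config = {"seed": 543}  # assumed value, for illustration only
    # backend=None runs the function in the current process; "nccl", "gloo"
    # or "xla-tpu" would spawn distributed workers instead.
    with idist.Parallel(backend=None) as parallel:
        parallel.run(training, config)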
