Skip to content

Commit

Permalink
[BE]: Apply pyupgrade yield from and unit test alias upgrades (pytorc…
Browse files Browse the repository at this point in the history
…h#94309)

Applies some more harmless pyupgrades. This one gets rid of deprecated aliases in unit tests, and upgrades more `yield`-in-a-for-loop patterns into `yield from` generator delegation, which is more performant and propagates more information / exceptions from the original generator. This is the modern recommended way of forwarding generators.
Pull Request resolved: pytorch#94309
Approved by: https://github.com/albanD
  • Loading branch information
Skylion007 authored and pytorchmergebot committed Feb 7, 2023
1 parent 895d478 commit 748bac8
Show file tree
Hide file tree
Showing 47 changed files with 308 additions and 329 deletions.
24 changes: 12 additions & 12 deletions caffe2/python/checkpoint_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,20 +78,20 @@ def fetch_total(session):
session, checkpoint = builder()
job.compile(LocalSession)
num_epochs = JobRunner(job, checkpoint).train(session)
self.assertEquals(num_epochs, len(EXPECTED_TOTALS))
self.assertEquals(fetch_total(session), EXPECTED_TOTALS[-1])
self.assertEqual(num_epochs, len(EXPECTED_TOTALS))
self.assertEqual(fetch_total(session), EXPECTED_TOTALS[-1])

for initial_epoch in range(1, num_epochs + 1):
session, checkpoint = builder()
JobRunner(
job,
checkpoint, resume_from_epoch=initial_epoch
).train(session)
self.assertEquals(fetch_total(session), EXPECTED_TOTALS[-1])
self.assertEqual(fetch_total(session), EXPECTED_TOTALS[-1])

for epoch in range(1, num_epochs + 1):
session.run(checkpoint.load(epoch))
self.assertEquals(fetch_total(session),
self.assertEqual(fetch_total(session),
EXPECTED_TOTALS[epoch - 1])

def test_single_checkpoint(self):
Expand Down Expand Up @@ -141,7 +141,7 @@ def test_ckpt_name_and_load_model_from_ckpts(self):
epoch = 5
node_name = 'trainer_%d' % node_id
expected_db_name = tmpdir + '/' + node_name + '.5'
self.assertEquals(
self.assertEqual(
checkpoint.get_ckpt_db_name(node_name, epoch),
expected_db_name)
shutil.rmtree(tmpdir)
Expand All @@ -159,15 +159,15 @@ def test_ckpt_name_and_load_model_from_ckpts(self):
job.compile(LocalSession)
job_runner = JobRunner(job, checkpoint)
num_epochs = job_runner.train(session)
self.assertEquals(num_epochs, len(EXPECTED_TOTALS))
self.assertEqual(num_epochs, len(EXPECTED_TOTALS))

# There are 17 global blobs after finishing up the job runner.
# (only blobs on init_group are checkpointed)
self.assertEquals(len(ws.blobs), 17)
self.assertEqual(len(ws.blobs), 17)

ws = workspace.C.Workspace()
session = LocalSession(ws)
self.assertEquals(len(ws.blobs), 0)
self.assertEqual(len(ws.blobs), 0)
model_blob_names = ['trainer_1/task_2/GivenTensorInt64Fill:0',
'trainer_2/task_2/GivenTensorInt64Fill:0']
checkpoint = MultiNodeCheckpointManager(tmpdir, 'minidb')
Expand All @@ -190,7 +190,7 @@ def test_ckpt_name_and_load_model_from_ckpts(self):
# Check that all the model blobs are loaded.
for blob_name in model_blob_names:
self.assertTrue(ws.has_blob(blob_name))
self.assertEquals(
self.assertEqual(
ws.fetch_blob(blob_name),
np.array([EXPECTED_TOTALS[epoch - 1]]))
self.assertFalse(
Expand Down Expand Up @@ -227,7 +227,7 @@ def test_upload_checkpoint(self):
job, checkpoint,
upload_task_group_builder=local_upload_builder)
num_epochs = job_runner.train(session)
self.assertEquals(num_epochs, len(EXPECTED_TOTALS))
self.assertEqual(num_epochs, len(EXPECTED_TOTALS))

# The uploaded files should exist now.
for node_id in range(num_nodes):
Expand Down Expand Up @@ -260,7 +260,7 @@ def test_ckpt_save_failure(self):
num_epochs = job_runner.train(session)
# make sure all epochs are executed even though saving the checkpoint failed
# Saving checkpoint failure should not cause job failure
self.assertEquals(num_epochs, len(EXPECTED_TOTALS))
self.assertEqual(num_epochs, len(EXPECTED_TOTALS))

def test_download_group_simple(self):
"""
Expand Down Expand Up @@ -332,7 +332,7 @@ def fetch_total(session):
checkpoint,
resume_from_epoch=initial_epoch
).train(session)
self.assertEquals(fetch_total(session), EXPECTED_TOTALS[-1])
self.assertEqual(fetch_total(session), EXPECTED_TOTALS[-1])

finally:
shutil.rmtree(tmpdir)
12 changes: 6 additions & 6 deletions caffe2/python/core_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -459,13 +459,13 @@ def test_extract_simple(self):
self.assertFalse("xx/data" in op.input)

# Note: image input should not be included
self.assertEquals(ops[0].type, "Conv")
self.assertEquals(ops[1].type, "FC")
self.assertEquals(ops[2].type, "FC")
self.assertEquals(len(ops), 3)
self.assertEqual(ops[0].type, "Conv")
self.assertEqual(ops[1].type, "FC")
self.assertEqual(ops[2].type, "FC")
self.assertEqual(len(ops), 3)

# test rename happened
self.assertEquals(ops[0].input[0], "image")
self.assertEqual(ops[0].input[0], "image")

# Check export blobs
self.assertTrue("image" not in export_blobs)
Expand All @@ -474,7 +474,7 @@ def test_extract_simple(self):

# Check external inputs/outputs
self.assertTrue("image" in predict_net.Proto().external_input)
self.assertEquals(set(["pred"]), set(predict_net.Proto().external_output))
self.assertEqual(set(["pred"]), set(predict_net.Proto().external_output))
self.assertEqual(
set(predict_net.Proto().external_input) -
set([str(p) for p in model.params]), set(["image"])
Expand Down
32 changes: 16 additions & 16 deletions caffe2/python/layer_parameter_sharing_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,26 +20,26 @@ def test_layer_parameter_name(self):
self.model.input_feature_schema.float_features,
output_dims
)
self.assertEquals(self.model.layers[-1].w, 'global_scope/fc/w')
self.assertEquals(fc1_output(), 'global_scope/fc/output')
self.assertEqual(self.model.layers[-1].w, 'global_scope/fc/w')
self.assertEqual(fc1_output(), 'global_scope/fc/output')

with scope.NameScope('nested_scope'):
fc2_output = self.model.FC(
fc1_output,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/nested_scope/fc/w')
self.assertEquals(fc2_output(),
self.assertEqual(fc2_output(),
'global_scope/nested_scope/fc/output')

fc3_output = self.model.FC(
fc1_output,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/nested_scope/fc_auto_0/w')
self.assertEquals(fc3_output(),
self.assertEqual(fc3_output(),
'global_scope/nested_scope/fc_auto_0/output')

def test_layer_shared_parameter_name_different_namescopes(self):
Expand All @@ -51,19 +51,19 @@ def test_layer_shared_parameter_name_different_namescopes(self):
self.model.input_feature_schema.float_features,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/scope_0/fc/w')
self.assertEquals(fc1_output(),
self.assertEqual(fc1_output(),
'global_scope/scope_0/fc/output')

with scope.NameScope('scope_1'):
fc2_output = self.model.FC(
self.model.input_feature_schema.float_features,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/scope_0/fc/w')
self.assertEquals(fc2_output(),
self.assertEqual(fc2_output(),
'global_scope/scope_1/fc/output')

def test_layer_shared_parameter_name_within_same_namescope(self):
Expand All @@ -74,14 +74,14 @@ def test_layer_shared_parameter_name_within_same_namescope(self):
self.model.input_feature_schema.float_features,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/fc/w')

self.model.FC(
self.model.input_feature_schema.float_features,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/fc/w')

def test_layer_shared_parameter_name_within_same_namescope_customized_name(self):
Expand All @@ -93,15 +93,15 @@ def test_layer_shared_parameter_name_within_same_namescope_customized_name(self)
output_dims,
name='shared_fc'
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/shared_fc/w')

self.model.FC(
self.model.input_feature_schema.float_features,
output_dims,
name='new_fc'
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/shared_fc/w')

def test_layer_shared_parameter_name_different_shapes(self):
Expand All @@ -112,7 +112,7 @@ def test_layer_shared_parameter_name_different_shapes(self):
self.model.input_feature_schema.float_features,
output_dims
)
self.assertEquals(self.model.layers[-1].w,
self.assertEqual(self.model.layers[-1].w,
'global_scope/fc/w')

with self.assertRaisesRegex(ValueError, 'Got inconsistent shapes .*'):
Expand Down Expand Up @@ -145,7 +145,7 @@ def test_layer_duplicated_parameter_init(self):
op_outputs.extend(op.output)

# only fill these parameter blobs once
self.assertEquals(
self.assertEqual(
sorted(op_outputs),
['global_scope/shared_fc/b', 'global_scope/shared_fc/w']
)
Expand Down
2 changes: 1 addition & 1 deletion caffe2/python/layers_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -424,7 +424,7 @@ def testSparseLookupSumPoolingWithEviction(self):
workspace.RunNetOnce(train_net.Proto())
embedding_after_training = workspace.FetchBlob("sparse_lookup/w")
# Verify row 0's value does not change after reset
self.assertEquals(embedding_after_training.all(), embedding_after_init.all())
self.assertEqual(embedding_after_training.all(), embedding_after_init.all())


def testSparseLookupSumPooling(self):
Expand Down
2 changes: 1 addition & 1 deletion caffe2/python/memonger_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def test_memonger_mix_cpu_gpu(self):
device_crossers = device_blobs[caffe2_pb2.CPU].intersection(
device_blobs[workspace.GpuDeviceType]
)
self.assertEquals(device_crossers, set())
self.assertEqual(device_crossers, set())

@given(input_dim=st.integers(min_value=4, max_value=4),
output_dim=st.integers(min_value=4, max_value=4),
Expand Down
24 changes: 12 additions & 12 deletions caffe2/python/modeling/parameter_sharing_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,56 +19,56 @@ class ParameterSharingTest(unittest.TestCase):
def test_parameter_sharing_default_scopes(self):
# Test no sharing default scopes
param_1 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_1, 'w')
self.assertEqual(param_1, 'w')
with scope.NameScope('scope'):
param_2 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_2, 'scope/w')
self.assertEqual(param_2, 'scope/w')
with scope.NameScope('scope_2'):
param_3 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_3, 'scope/scope_2/w')
self.assertEqual(param_3, 'scope/scope_2/w')

def test_parameter_sharing_nested_scopes(self):
# Test parameter sharing
with scope.NameScope('global_scope'):
with ParameterSharing({'model_b': 'model_a'}):
param_global = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_global, 'global_scope/w')
self.assertEqual(param_global, 'global_scope/w')
# This scope is overridden to match 'model_a'
with scope.NameScope('model_b'):
with ParameterSharing({'shared_scope': ''}):
param_4 = parameter_sharing_context.get_parameter_name(
'w')
self.assertEquals(param_4, 'global_scope/model_a/w')
self.assertEqual(param_4, 'global_scope/model_a/w')
with scope.NameScope('shared_scope'):
param_5 = parameter_sharing_context.\
get_parameter_name('w')
self.assertEquals(param_5, 'global_scope/model_a/w')
self.assertEqual(param_5, 'global_scope/model_a/w')
# This scope is supposed to have not sharing
with scope.NameScope('model_c'):
with ParameterSharing({'shared_scope': ''}):
param_4 = parameter_sharing_context.get_parameter_name(
'w')
self.assertEquals(param_4, 'global_scope/model_c/w')
self.assertEqual(param_4, 'global_scope/model_c/w')
with scope.NameScope('shared_scope'):
param_5 = parameter_sharing_context.\
get_parameter_name('w')
self.assertEquals(param_5, 'global_scope/model_c/w')
self.assertEqual(param_5, 'global_scope/model_c/w')

def test_parameter_sharing_subscopes(self):
# Sharing only one of the subscopes
with ParameterSharing({'global_scope/b': 'global_scope/a'}):
with scope.NameScope('global_scope'):
param_6 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_6, 'global_scope/w')
self.assertEqual(param_6, 'global_scope/w')
with scope.NameScope('a'):
param_7 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_7, 'global_scope/a/w')
self.assertEqual(param_7, 'global_scope/a/w')
with scope.NameScope('b'):
param_8 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_8, 'global_scope/a/w')
self.assertEqual(param_8, 'global_scope/a/w')
with scope.NameScope('c'):
param_9 = parameter_sharing_context.get_parameter_name('w')
self.assertEquals(param_9, 'global_scope/c/w')
self.assertEqual(param_9, 'global_scope/c/w')

def test_create_param(self):
model = model_helper.ModelHelper(name="test")
Expand Down
32 changes: 16 additions & 16 deletions caffe2/python/net_builder_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def test_ops(self):
]
for b, expected in expected:
actual = ws.blobs[str(b)].fetch()
self.assertEquals(actual, expected)
self.assertEqual(actual, expected)

def _expected_loop(self):
total = 0
Expand Down Expand Up @@ -152,7 +152,7 @@ def test_net_multi_use(self):
result = final_output(total)
with LocalSession() as session:
session.run(task)
self.assertEquals(2, result.fetch())
self.assertEqual(2, result.fetch())

def test_loops(self):
with Task() as task:
Expand All @@ -162,7 +162,7 @@ def test_loops(self):
expected = self._expected_loop()
actual = [o.fetch() for o in out_actual]
for e, a in zip(expected, actual):
self.assertEquals(e, a)
self.assertEqual(e, a)

def test_setup(self):
with Task() as task:
Expand All @@ -184,9 +184,9 @@ def test_setup(self):
o7_2 = final_output(seven_2)
with LocalSession() as session:
session.run(task)
self.assertEquals(o6.fetch(), 6)
self.assertEquals(o7_1.fetch(), 7)
self.assertEquals(o7_2.fetch(), 7)
self.assertEqual(o6.fetch(), 6)
self.assertEqual(o7_1.fetch(), 7)
self.assertEqual(o7_2.fetch(), 7)

def test_multi_instance_python_op(self):
"""
Expand All @@ -203,8 +203,8 @@ def test_multi_instance_python_op(self):
PythonOpStats.num_instances = 0
PythonOpStats.num_calls = 0
session.run(task)
self.assertEquals(PythonOpStats.num_instances, 64)
self.assertEquals(PythonOpStats.num_calls, 256)
self.assertEqual(PythonOpStats.num_instances, 64)
self.assertEqual(PythonOpStats.num_calls, 256)

def test_multi_instance(self):
NUM_INSTANCES = 10
Expand Down Expand Up @@ -242,9 +242,9 @@ def test_multi_instance(self):

with LocalSession() as session:
session.run(tg)
self.assertEquals(total1.fetch(), NUM_INSTANCES * NUM_ITERS)
self.assertEquals(total2.fetch(), NUM_INSTANCES * (NUM_ITERS ** 2))
self.assertEquals(total3.fetch(), NUM_INSTANCES * (NUM_ITERS ** 2))
self.assertEqual(total1.fetch(), NUM_INSTANCES * NUM_ITERS)
self.assertEqual(total2.fetch(), NUM_INSTANCES * (NUM_ITERS ** 2))
self.assertEqual(total3.fetch(), NUM_INSTANCES * (NUM_ITERS ** 2))

def test_if_net(self):
with NetBuilder() as nb:
Expand Down Expand Up @@ -303,11 +303,11 @@ def test_if_net(self):
y1_value = ws.blobs[str(y1)].fetch()
y2_value = ws.blobs[str(y2)].fetch()

self.assertEquals(first_res_value, 1)
self.assertEquals(second_res_value, 2)
self.assertEquals(y0_value, 1000)
self.assertEquals(y1_value, 101)
self.assertEquals(y2_value, 108)
self.assertEqual(first_res_value, 1)
self.assertEqual(second_res_value, 2)
self.assertEqual(y0_value, 1000)
self.assertEqual(y1_value, 101)
self.assertEqual(y2_value, 108)
self.assertTrue(str(local_blob) not in ws.blobs)

def test_while_net(self):
Expand Down
Loading

0 comments on commit 748bac8

Please sign in to comment.