This repository was archived by the owner on Jul 7, 2023. It is now read-only.

Commit 2d78a7b

lgeiger authored and afrozenator committed
Remove unnecessary use of six.iterkeys (#1444)
1 parent c81f56c · commit 2d78a7b

File tree

tensor2tensor/data_generators/problem.py
tensor2tensor/layers/latent_layers_test.py

2 files changed: +4 -5 lines


tensor2tensor/data_generators/problem.py

Lines changed: 2 additions & 2 deletions
@@ -950,7 +950,7 @@ def _reverse_problem_hparams(p_hparams):
   # 'target', and each intended feature to swap has feature name 'input'.
   # In the future, remove need for this behavior.
   reversed_modality = {}
-  for feature_name in six.iterkeys(p.modality):
+  for feature_name in p.modality:
     reversed_feature_name = feature_name.replace("target", "input")
     if "target" in feature_name and reversed_feature_name in p.modality:
       reversed_modality[feature_name] = p.modality[reversed_feature_name]
@@ -962,7 +962,7 @@ def _reverse_problem_hparams(p_hparams):
 
   # Swap vocab sizes.
   reversed_vocab_size = {}
-  for feature_name in six.iterkeys(p.vocab_size):
+  for feature_name in p.vocab_size:
     reversed_feature_name = feature_name.replace("target", "input")
     if "target" in feature_name and reversed_feature_name in p.vocab_size:
       reversed_vocab_size[feature_name] = p.vocab_size[reversed_feature_name]
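
The two loops above only need the keys of p.modality and p.vocab_size; the values are looked up explicitly. In both Python 2 and Python 3, iterating over a dict yields its keys, so six.iterkeys(d) can be dropped here without changing behavior. A minimal standalone sketch of the equivalence (the modality dict below is made up for illustration, not taken from tensor2tensor):

    # Hypothetical stand-in for p.modality / p.vocab_size.
    modality = {"inputs": "symbol", "targets": "symbol"}

    # Plain dict iteration yields keys in both Python 2 and 3, so it is
    # interchangeable with six.iterkeys(modality) in this loop.
    reversed_modality = {}
    for feature_name in modality:
      reversed_feature_name = feature_name.replace("target", "input")
      if "target" in feature_name and reversed_feature_name in modality:
        reversed_modality[feature_name] = modality[reversed_feature_name]

    print(reversed_modality)  # {'targets': 'symbol'}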

tensor2tensor/layers/latent_layers_test.py

Lines changed: 2 additions & 3 deletions
@@ -20,7 +20,6 @@
 from __future__ import print_function
 
 import functools
-import six
 
 from tensor2tensor.layers import common_image_attention as cia
 from tensor2tensor.layers import discretization
@@ -138,8 +137,7 @@ def testTransformerAutoencoder(self):
     decoder_output, losses, cache = latent_layers.transformer_autoencoder(
         inputs, targets, target_space_id, hparams)
 
-    self.assertEqual(set(six.iterkeys(losses)),
-                     {"extra", "extra_loss", "latent_pred"})
+    self.assertEqual(set(losses), {"extra", "extra_loss", "latent_pred"})
 
     self.evaluate(tf.global_variables_initializer())
     decoder_output_, extra_loss_, latent_pred_ = self.evaluate(
@@ -154,5 +152,6 @@ def testTransformerAutoencoder(self):
     self.assertAllGreaterEqual(latent_pred_, 0.)
     self.assertEqual(cache, None)
 
+
 if __name__ == "__main__":
   tf.test.main()
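
The test change follows the same reasoning: set() applied to a dict collects its keys, so set(losses) equals set(six.iterkeys(losses)) and the import six line becomes unused. A small illustration (the losses dict below is invented for the example):

    # Invented losses dict mirroring the keys the test expects.
    losses = {"extra": 0.1, "extra_loss": 0.2, "latent_pred": 0.3}

    # set() over a dict iterates its keys, the same keys six.iterkeys() yields.
    assert set(losses) == {"extra", "extra_loss", "latent_pred"}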
