Commit 7bb76e0

fixed typo in multioptimizer class name and added code owners (tensorflow#2164)
1 parent a454abe

File tree

4 files changed: 9 additions & 7 deletions

.github/CODEOWNERS

Lines changed: 2 additions & 0 deletions
@@ -146,6 +146,8 @@
 /tensorflow_addons/optimizers/tests/conditional_gradient_test.py @pkan2 @lokhande-vishnu
 /tensorflow_addons/optimizers/cyclical_learning_rate.py @raphaelmeudec
 /tensorflow_addons/optimizers/tests/cyclical_learning_rate_test.py @raphaelmeudec
+/tensorflow_addons/optimizers/discriminative_layer_training.py @hyang0129
+/tensorflow_addons/optimizers/tests/discriminative_layer_training_test.py @hyang0129
 /tensorflow_addons/optimizers/lamb.py @junjiek
 /tensorflow_addons/optimizers/tests/lamb_test.py @junjiek
 /tensorflow_addons/optimizers/lazy_adam.py @ssaishruthi

tensorflow_addons/optimizers/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@
     ExponentialCyclicalLearningRate,
 )
 from tensorflow_addons.optimizers.discriminative_layer_training import (
-    MultiOptimzer,
+    MultiOptimizer,
 )
 from tensorflow_addons.optimizers.lamb import LAMB
 from tensorflow_addons.optimizers.lazy_adam import LazyAdam

tensorflow_addons/optimizers/discriminative_layer_training.py

Lines changed: 3 additions & 3 deletions
@@ -21,7 +21,7 @@
 
 
 @tf.keras.utils.register_keras_serializable(package="Addons")
-class MultiOptimzer(tf.keras.optimizers.Optimizer):
+class MultiOptimizer(tf.keras.optimizers.Optimizer):
     """Multi Optimizer Wrapper for Discriminative Layer Training.
 
     Creates a wrapper around a set of instantiated optimizer layer pairs. Generally useful for transfer learning
@@ -84,7 +84,7 @@ def __init__(
         **kwargs
     ):
 
-        super(MultiOptimzer, self).__init__(name, **kwargs)
+        super(MultiOptimizer, self).__init__(name, **kwargs)
 
         if optimizer_specs is None and optimizers_and_layers is not None:
             self.optimizer_specs = [
@@ -126,7 +126,7 @@ def apply_gradients(self, grads_and_vars, name=None, **kwargs):
         )
 
     def get_config(self):
-        config = super(MultiOptimzer, self).get_config()
+        config = super(MultiOptimizer, self).get_config()
         config.update({"optimizer_specs": self.optimizer_specs})
         return config
 
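For context, a minimal usage sketch of the renamed wrapper, assuming a toy two-layer Sequential model (the model, layer sizes, and learning rates are illustrative; the (optimizer, layer) pairing and the loss mirror the test updated in this commit):

import tensorflow as tf
import tensorflow_addons as tfa

# Toy model: treat the first Dense layer as a pretrained "backbone" and the
# second as a freshly initialized "head".
model = tf.keras.Sequential(
    [
        tf.keras.layers.Dense(16, input_shape=(4,), activation="relu"),
        tf.keras.layers.Dense(1),
    ]
)

# One optimizer per layer; the backbone trains with a smaller learning rate.
opt1 = tf.keras.optimizers.Adam(learning_rate=1e-4)
opt2 = tf.keras.optimizers.Adam(learning_rate=1e-2)
opt_layer_pairs = [(opt1, model.layers[0]), (opt2, model.layers[1])]

# MultiOptimizer (spelled MultiOptimzer before this commit) applies each
# layer's gradients with that layer's paired optimizer.
optimizer = tfa.optimizers.MultiOptimizer(opt_layer_pairs)
model.compile(optimizer=optimizer, loss=tf.keras.losses.MSE)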

tensorflow_addons/optimizers/tests/discriminative_layer_training_test.py

Lines changed: 3 additions & 3 deletions
@@ -18,7 +18,7 @@
 import numpy as np
 import tensorflow as tf
 
-from tensorflow_addons.optimizers.discriminative_layer_training import MultiOptimzer
+from tensorflow_addons.optimizers.discriminative_layer_training import MultiOptimizer
 from tensorflow_addons.utils import test_utils
 
 
@@ -66,7 +66,7 @@ def test_fit_layer_optimizer(dtype, device, serialize):
     opt_layer_pairs = [(opt1, model.layers[0]), (opt2, model.layers[1])]
 
     loss = tf.keras.losses.MSE
-    optimizer = MultiOptimzer(opt_layer_pairs)
+    optimizer = MultiOptimizer(opt_layer_pairs)
 
     model.compile(optimizer=optimizer, loss=loss)
 
@@ -106,7 +106,7 @@ def test_serialization():
 
     opt_layer_pairs = [(opt1, model.layers[0]), (opt2, model.layers[1])]
 
-    optimizer = MultiOptimzer(opt_layer_pairs)
+    optimizer = MultiOptimizer(opt_layer_pairs)
     config = tf.keras.optimizers.serialize(optimizer)
 
     new_optimizer = tf.keras.optimizers.deserialize(config)
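As the serialization test above exercises, the wrapper is registered via @tf.keras.utils.register_keras_serializable, so the standard Keras round trip applies. A short sketch, continuing from the previous example (the `optimizer` variable is reused from that sketch, not defined in this diff):

import tensorflow as tf
import tensorflow_addons as tfa  # importing tfa registers MultiOptimizer with Keras

# `optimizer` is the MultiOptimizer built in the earlier sketch.
config = tf.keras.optimizers.serialize(optimizer)
new_optimizer = tf.keras.optimizers.deserialize(config)

# The deserialized wrapper is a regular Keras optimizer and can be passed to
# model.compile just like the original.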
