fixed typo in multioptimizer class name and added code owners #2164

Merged: 1 commit, Sep 14, 2020

2 changes: 2 additions & 0 deletions .github/CODEOWNERS
@@ -146,6 +146,8 @@
 /tensorflow_addons/optimizers/tests/conditional_gradient_test.py @pkan2 @lokhande-vishnu
 /tensorflow_addons/optimizers/cyclical_learning_rate.py @raphaelmeudec
 /tensorflow_addons/optimizers/tests/cyclical_learning_rate_test.py @raphaelmeudec
+/tensorflow_addons/optimizers/discriminative_layer_training.py @hyang0129
+/tensorflow_addons/optimizers/tests/discriminative_layer_training_test.py @hyang0129
 /tensorflow_addons/optimizers/lamb.py @junjiek
 /tensorflow_addons/optimizers/tests/lamb_test.py @junjiek
 /tensorflow_addons/optimizers/lazy_adam.py @ssaishruthi
2 changes: 1 addition & 1 deletion tensorflow_addons/optimizers/__init__.py
@@ -27,7 +27,7 @@
     ExponentialCyclicalLearningRate,
 )
 from tensorflow_addons.optimizers.discriminative_layer_training import (
-    MultiOptimzer,
+    MultiOptimizer,
 )
 from tensorflow_addons.optimizers.lamb import LAMB
 from tensorflow_addons.optimizers.lazy_adam import LazyAdam
6 changes: 3 additions & 3 deletions tensorflow_addons/optimizers/discriminative_layer_training.py
@@ -21,7 +21,7 @@
 
 
 @tf.keras.utils.register_keras_serializable(package="Addons")
-class MultiOptimzer(tf.keras.optimizers.Optimizer):
+class MultiOptimizer(tf.keras.optimizers.Optimizer):
     """Multi Optimizer Wrapper for Discriminative Layer Training.
 
     Creates a wrapper around a set of instantiated optimizer layer pairs. Generally useful for transfer learning
@@ -84,7 +84,7 @@ def __init__(
         **kwargs
     ):
 
-        super(MultiOptimzer, self).__init__(name, **kwargs)
+        super(MultiOptimizer, self).__init__(name, **kwargs)
 
         if optimizer_specs is None and optimizers_and_layers is not None:
             self.optimizer_specs = [
@@ -126,7 +126,7 @@ def apply_gradients(self, grads_and_vars, name=None, **kwargs):
         )
 
     def get_config(self):
-        config = super(MultiOptimzer, self).get_config()
+        config = super(MultiOptimizer, self).get_config()
         config.update({"optimizer_specs": self.optimizer_specs})
         return config
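For reference, here is a minimal usage sketch of the renamed class, adapted from the tests touched by this PR. The layer sizes, learning rates, and optimizer choices are illustrative, not part of the diff:

import tensorflow as tf
from tensorflow_addons.optimizers import MultiOptimizer

# Two-layer model: each layer gets its own optimizer, which is the
# discriminative layer training pattern this wrapper supports.
model = tf.keras.Sequential(
    [
        tf.keras.layers.Dense(4, input_shape=(4,)),
        tf.keras.layers.Dense(1),
    ]
)

# Illustrative rates: a smaller one for the first (e.g. pretrained) layer.
opt1 = tf.keras.optimizers.Adam(learning_rate=1e-4)
opt2 = tf.keras.optimizers.Adam(learning_rate=1e-3)

# Pair each optimizer with the layer it should update.
opt_layer_pairs = [(opt1, model.layers[0]), (opt2, model.layers[1])]

model.compile(optimizer=MultiOptimizer(opt_layer_pairs), loss=tf.keras.losses.MSE)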
6 changes: 3 additions & 3 deletions tensorflow_addons/optimizers/tests/discriminative_layer_training_test.py
@@ -18,7 +18,7 @@
 import numpy as np
 import tensorflow as tf
 
-from tensorflow_addons.optimizers.discriminative_layer_training import MultiOptimzer
+from tensorflow_addons.optimizers.discriminative_layer_training import MultiOptimizer
 from tensorflow_addons.utils import test_utils
 
 
@@ -66,7 +66,7 @@ def test_fit_layer_optimizer(dtype, device, serialize):
     opt_layer_pairs = [(opt1, model.layers[0]), (opt2, model.layers[1])]
 
     loss = tf.keras.losses.MSE
-    optimizer = MultiOptimzer(opt_layer_pairs)
+    optimizer = MultiOptimizer(opt_layer_pairs)
 
     model.compile(optimizer=optimizer, loss=loss)
 
@@ -106,7 +106,7 @@ def test_serialization():
 
     opt_layer_pairs = [(opt1, model.layers[0]), (opt2, model.layers[1])]
 
-    optimizer = MultiOptimzer(opt_layer_pairs)
+    optimizer = MultiOptimizer(opt_layer_pairs)
     config = tf.keras.optimizers.serialize(optimizer)
 
     new_optimizer = tf.keras.optimizers.deserialize(config)
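The get_config fix is what makes the round trip below work; this is a sketch condensed from test_serialization, reusing the same illustrative model setup as above:

import tensorflow as tf
from tensorflow_addons.optimizers import MultiOptimizer

model = tf.keras.Sequential(
    [
        tf.keras.layers.Dense(4, input_shape=(4,)),
        tf.keras.layers.Dense(1),
    ]
)
opt_layer_pairs = [
    (tf.keras.optimizers.Adam(learning_rate=1e-4), model.layers[0]),
    (tf.keras.optimizers.Adam(learning_rate=1e-3), model.layers[1]),
]
optimizer = MultiOptimizer(opt_layer_pairs)

# get_config() stores optimizer_specs under the corrected class name,
# so the wrapper survives a serialize/deserialize round trip.
config = tf.keras.optimizers.serialize(optimizer)
new_optimizer = tf.keras.optimizers.deserialize(config)
assert isinstance(new_optimizer, MultiOptimizer)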