Update Travis Tests for optimizers
torzdf committed May 25, 2021
1 parent 4c1631b commit 8c87ac5
Showing 3 changed files with 31 additions and 12 deletions.
15 changes: 12 additions & 3 deletions lib/model/optimizers_plaid.py
@@ -1,8 +1,11 @@
#!/usr/bin/env python3
""" Custom Optimizers for PlaidML/Keras 2.2. """
+import inspect
+import sys

from keras import backend as K
from keras.optimizers import Optimizer
+from keras.utils import get_custom_objects


class AdaBelief(Optimizer):
@@ -81,7 +84,7 @@ def get_updates(self, loss, params): # pylint:disable=too-many-locals
----------
loss: list
The loss to update
-parans: list
+params: list
The variables
"""
grads = self.get_gradients(loss, params)
@@ -129,8 +132,8 @@ def get_config(self):
""" Returns the config of the optimizer.
An optimizer config is a Python dictionary (serializable) containing the configuration of
-an optimizer. The same optimizer can be reinstantiated later (without any saved state) from
-this configuration.
+an optimizer. The same optimizer can be re-instantiated later (without any saved state)
+from this configuration.
Returns
-------
@@ -145,3 +148,9 @@ def get_config(self):
weight_decay=self.weight_decay)
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))
+
+
+# Update layers into Keras custom objects
+for name, obj in inspect.getmembers(sys.modules[__name__]):
+    if inspect.isclass(obj) and obj.__module__ == __name__:
+        get_custom_objects().update({name: obj})
25 changes: 17 additions & 8 deletions lib/model/optimizers_tf.py
@@ -1,11 +1,14 @@
#!/usr/bin/env python3
""" Custom Optimizers for TensorFlow 2.x/tf.keras """

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

+import inspect
+import sys

import tensorflow as tf
+from keras.utils import get_custom_objects


class AdaBelief(tf.keras.optimizers.Optimizer):
@@ -67,7 +70,7 @@ class AdaBelief(tf.keras.optimizers.Optimizer):
>>> new_optimizer = tf.keras.optimizers.deserialize(config,
... custom_objects=dict(AdaBelief=AdaBelief))
-Example of warmup:
+Example of warm up:
>>> opt = AdaBelief(lr=1e-3, total_steps=10000, warmup_proportion=0.1, min_lr=1e-5)
@@ -147,7 +150,7 @@ def _create_slots(self, var_list):
Parameters
----------
var_list: list
-List of tf variables to create slots for
+List of tensorflow variables to create slots for
"""
for var in var_list:
self.add_slot(var, "m")
@@ -158,9 +161,9 @@ def _create_slots(self, var_list):
def set_weights(self, weights):
""" Set the weights of the optimizer.
-The weights of an optimizer are its state (ie, variables). This function takes the weight
+The weights of an optimizer are its state (IE, variables). This function takes the weight
values associated with this optimizer as a list of Numpy arrays. The first value is always
-the iterations count of the optimizer, followed by the optimizer's state variables in the
+the iterations count of the optimizer, followed by the optimizers state variables in the
order they are created. The passed values are used to set the new state of the optimizer.
Parameters
@@ -180,7 +183,7 @@ def _decayed_wd(self, var_dtype):
Parameters
----------
var_dtype: str
-The data type to to set up weight decau for
+The data type to to set up weight decay for
Returns
-------
@@ -363,8 +366,8 @@ def get_config(self):
""" Returns the config of the optimizer.
An optimizer config is a Python dictionary (serializable) containing the configuration of
-an optimizer. The same optimizer can be reinstantiated later (without any saved state) from
-this configuration.
+an optimizer. The same optimizer can be re-instantiated later (without any saved state)
+from this configuration.
Returns
-------
@@ -385,3 +388,9 @@ def get_config(self):
warmup_proportion=self._serialize_hyperparameter("warmup_proportion"),
min_lr=self._serialize_hyperparameter("min_lr")))
return config
+
+
+# Update layers into Keras custom objects
+for name, obj in inspect.getmembers(sys.modules[__name__]):
+    if inspect.isclass(obj) and obj.__module__ == __name__:
+        get_custom_objects().update({name: obj})
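Both modules now end with the same registration loop, which is what lets the test suite deserialize these optimizers by class name alone. Below is a minimal, self-contained sketch of the pattern, assuming TensorFlow 2.x; the MyAdaBelief class and the tf.keras imports are illustrative stand-ins, not the repo's actual modules.

import inspect
import sys

import tensorflow as tf
from tensorflow.keras.utils import get_custom_objects


class MyAdaBelief(tf.keras.optimizers.Adam):
    """ Hypothetical stand-in for a custom optimizer defined in this module. """


# Same loop as in the diff: register every class defined in this module
# in Keras' global custom-object registry, keyed by class name.
for name, obj in inspect.getmembers(sys.modules[__name__]):
    if inspect.isclass(obj) and obj.__module__ == __name__:
        get_custom_objects().update({name: obj})

# Once registered, the optimizer round-trips by name; no explicit
# custom_objects argument is needed at deserialization time.
config = tf.keras.optimizers.serialize(MyAdaBelief())
restored = tf.keras.optimizers.deserialize(config)
assert type(restored).__name__ == "MyAdaBelief"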
3 changes: 2 additions & 1 deletion tests/lib/model/optimizers_test.py
@@ -47,6 +47,7 @@ def _test_optimizer(optimizer, target=0.75):
config = k_optimizers.serialize(optimizer)
optim = k_optimizers.deserialize(config)
new_config = k_optimizers.serialize(optim)
config["class_name"] = config["class_name"].lower()
new_config["class_name"] = new_config["class_name"].lower()
assert config == new_config

@@ -82,4 +83,4 @@ def test_adam(dummy): # pylint:disable=unused-argument
@pytest.mark.parametrize("dummy", [None], ids=[get_backend().upper()])
def test_adabelief(dummy): # pylint:disable=unused-argument
""" Test for custom Adam optimizer """
-_test_optimizer(optimizers.AdaBelief(), target=0.6)
+_test_optimizer(optimizers.AdaBelief(), target=0.5)
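And the gist of the updated round-trip check in _test_optimizer, sketched here with a built-in optimizer so it stands alone (tf.keras assumed): lower-casing class_name on both sides keeps the assertion stable when a backend changes the casing during a serialize/deserialize round trip.

from tensorflow.keras import optimizers as k_optimizers

# Serialize -> deserialize -> re-serialize, then compare the two configs.
config = k_optimizers.serialize(k_optimizers.Adam())
optim = k_optimizers.deserialize(config)
new_config = k_optimizers.serialize(optim)

# class_name casing can differ between backends and round trips
# (e.g. "Adam" vs "adam"), so normalise both sides before comparing.
config["class_name"] = config["class_name"].lower()
new_config["class_name"] = new_config["class_name"].lower()
assert config == new_config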