Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

modernize Python in losses.py and its tests #47873

Merged
merged 10 commits
Mar 30, 2021
61 changes: 23 additions & 38 deletions tensorflow/python/keras/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,10 @@
# limitations under the License.
# ==============================================================================
"""Built-in loss functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import abc
import functools

import six

from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.impl import api as autograph
from tensorflow.python.distribute import distribution_strategy_context
Expand Down Expand Up @@ -50,7 +45,7 @@


@keras_export('keras.losses.Loss')
class Loss(object):
class Loss:
"""Loss base class.

To be implemented by subclasses:
Expand Down Expand Up @@ -241,7 +236,7 @@ def __init__(self,
name: (Optional) name for the loss.
**kwargs: The keyword arguments that are passed on to `fn`.
"""
super(LossFunctionWrapper, self).__init__(reduction=reduction, name=name)
super().__init__(reduction=reduction, name=name)
self.fn = fn
self._fn_kwargs = kwargs

Expand All @@ -263,9 +258,9 @@ def call(self, y_true, y_pred):

def get_config(self):
config = {}
for k, v in six.iteritems(self._fn_kwargs):
for k, v in self._fn_kwargs.items():
config[k] = K.eval(v) if tf_utils.is_tensor_or_variable(v) else v
base_config = super(LossFunctionWrapper, self).get_config()
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))


Expand Down Expand Up @@ -324,8 +319,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'mean_squared_error'.
"""
super(MeanSquaredError, self).__init__(
mean_squared_error, name=name, reduction=reduction)
super().__init__(mean_squared_error, name=name, reduction=reduction)


@keras_export('keras.losses.MeanAbsoluteError')
Expand Down Expand Up @@ -383,8 +377,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'mean_absolute_error'.
"""
super(MeanAbsoluteError, self).__init__(
mean_absolute_error, name=name, reduction=reduction)
super().__init__(mean_absolute_error, name=name, reduction=reduction)


@keras_export('keras.losses.MeanAbsolutePercentageError')
Expand Down Expand Up @@ -444,7 +437,7 @@ def __init__(self,
name: Optional name for the op. Defaults to
'mean_absolute_percentage_error'.
"""
super(MeanAbsolutePercentageError, self).__init__(
super().__init__(
mean_absolute_percentage_error, name=name, reduction=reduction)


Expand Down Expand Up @@ -505,7 +498,7 @@ def __init__(self,
name: Optional name for the op. Defaults to
'mean_squared_logarithmic_error'.
"""
super(MeanSquaredLogarithmicError, self).__init__(
super().__init__(
mean_squared_logarithmic_error, name=name, reduction=reduction)


Expand Down Expand Up @@ -599,7 +592,7 @@ def __init__(self,
more details.
name: (Optional) Name for the op. Defaults to 'binary_crossentropy'.
"""
super(BinaryCrossentropy, self).__init__(
super().__init__(
binary_crossentropy,
name=name,
reduction=reduction,
Expand Down Expand Up @@ -678,7 +671,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'categorical_crossentropy'.
"""
super(CategoricalCrossentropy, self).__init__(
super().__init__(
categorical_crossentropy,
name=name,
reduction=reduction,
Expand Down Expand Up @@ -755,7 +748,7 @@ def __init__(self,
name: Optional name for the op. Defaults to
'sparse_categorical_crossentropy'.
"""
super(SparseCategoricalCrossentropy, self).__init__(
super().__init__(
sparse_categorical_crossentropy,
name=name,
reduction=reduction,
Expand Down Expand Up @@ -818,7 +811,7 @@ def __init__(self, reduction=losses_utils.ReductionV2.AUTO, name='hinge'):
more details.
name: Optional name for the op. Defaults to 'hinge'.
"""
super(Hinge, self).__init__(hinge, name=name, reduction=reduction)
super().__init__(hinge, name=name, reduction=reduction)


@keras_export('keras.losses.SquaredHinge')
Expand Down Expand Up @@ -879,8 +872,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'squared_hinge'.
"""
super(SquaredHinge, self).__init__(
squared_hinge, name=name, reduction=reduction)
super().__init__(squared_hinge, name=name, reduction=reduction)


@keras_export('keras.losses.CategoricalHinge')
Expand Down Expand Up @@ -939,8 +931,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'categorical_hinge'.
"""
super(CategoricalHinge, self).__init__(
categorical_hinge, name=name, reduction=reduction)
super().__init__(categorical_hinge, name=name, reduction=reduction)


@keras_export('keras.losses.Poisson')
Expand Down Expand Up @@ -996,7 +987,7 @@ def __init__(self, reduction=losses_utils.ReductionV2.AUTO, name='poisson'):
more details.
name: Optional name for the op. Defaults to 'poisson'.
"""
super(Poisson, self).__init__(poisson, name=name, reduction=reduction)
super().__init__(poisson, name=name, reduction=reduction)


@keras_export('keras.losses.LogCosh')
Expand Down Expand Up @@ -1053,7 +1044,7 @@ def __init__(self, reduction=losses_utils.ReductionV2.AUTO, name='log_cosh'):
more details.
name: Optional name for the op. Defaults to 'log_cosh'.
"""
super(LogCosh, self).__init__(log_cosh, name=name, reduction=reduction)
super().__init__(log_cosh, name=name, reduction=reduction)


@keras_export('keras.losses.KLDivergence')
Expand Down Expand Up @@ -1113,8 +1104,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'kl_divergence'.
"""
super(KLDivergence, self).__init__(
kl_divergence, name=name, reduction=reduction)
super().__init__(kl_divergence, name=name, reduction=reduction)


@keras_export('keras.losses.Huber')
Expand Down Expand Up @@ -1181,8 +1171,7 @@ def __init__(self,
more details.
name: Optional name for the op. Defaults to 'huber_loss'.
"""
super(Huber, self).__init__(
huber, name=name, reduction=reduction, delta=delta)
super().__init__(huber, name=name, reduction=reduction, delta=delta)


@keras_export('keras.metrics.mean_squared_error', 'keras.metrics.mse',
Expand Down Expand Up @@ -1952,7 +1941,7 @@ def __init__(self,
axis=-1,
reduction=losses_utils.ReductionV2.AUTO,
name='cosine_similarity'):
super(CosineSimilarity, self).__init__(
super().__init__(
cosine_similarity, reduction=reduction, name=name, axis=axis)


Expand Down Expand Up @@ -2046,16 +2035,12 @@ def get(identifier):
"""
if identifier is None:
return None
if isinstance(identifier, six.string_types):
identifier = str(identifier)
return deserialize(identifier)
if isinstance(identifier, dict):
if isinstance(identifier, str) or isinstance(identifier, dict):
return deserialize(identifier)
elif callable(identifier):
if callable(identifier):
return identifier
else:
raise ValueError(
'Could not interpret loss function identifier: {}'.format(identifier))
raise ValueError(
f'Could not interpret loss function identifier: {identifier}')


LABEL_DTYPES_FOR_LOSSES = {
Expand Down
6 changes: 1 addition & 5 deletions tensorflow/python/keras/losses_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,6 @@
# ==============================================================================
"""Tests for Keras loss functions."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl.testing import parameterized
import numpy as np

Expand Down Expand Up @@ -1787,7 +1783,7 @@ def test_loss_with_non_default_dtype(self):
class BinaryTruePositivesViaControlFlow(losses.Loss):

def __init__(self, reduction=losses_utils.ReductionV2.AUTO):
super(BinaryTruePositivesViaControlFlow, self).__init__(reduction=reduction)
super().__init__(reduction=reduction)

def call(self, y_true, y_pred):
y_true = math_ops.cast(y_true, dtypes.bool)
Expand Down