Merge pull request #48654 from szutenberg:relu_docs_fix
PiperOrigin-RevId: 373678352
Change-Id: I176f0630817f94ee928cd739b2f2e824b1e91d82
tensorflower-gardener committed May 13, 2021
2 parents 5392b4e + 73bd1ec commit 28c2152
Showing 2 changed files with 56 additions and 23 deletions.
19 changes: 10 additions & 9 deletions tensorflow/python/keras/layers/advanced_activations.py
@@ -404,20 +404,21 @@ class ReLU(Layer):
     max_value: Float >= 0. Maximum activation value. Default to None, which
       means unlimited.
     negative_slope: Float >= 0. Negative slope coefficient. Default to 0.
-    threshold: Float. Threshold value for thresholded activation. Default to 0.
+    threshold: Float >= 0. Threshold value for thresholded activation. Default
+      to 0.
   """

   def __init__(self, max_value=None, negative_slope=0, threshold=0, **kwargs):
     super(ReLU, self).__init__(**kwargs)
     if max_value is not None and max_value < 0.:
-      raise ValueError('max_value of Relu layer '
-                       'cannot be negative value: ' + str(max_value))
-    if negative_slope < 0.:
-      raise ValueError('negative_slope of Relu layer '
-                       'cannot be negative value: ' + str(negative_slope))
-    if threshold is None:
-      raise ValueError('threshold of Relu layer '
-                       'cannot be None. Required a float')
+      raise ValueError('max_value of a ReLU layer cannot be a negative '
+                       'value. Got: %s' % max_value)
+    if negative_slope is None or negative_slope < 0.:
+      raise ValueError('negative_slope of a ReLU layer cannot be a negative '
+                       'value. Got: %s' % negative_slope)
+    if threshold is None or threshold < 0.:
+      raise ValueError('threshold of a ReLU layer cannot be a negative '
+                       'value. Got: %s' % threshold)

     self.supports_masking = True
     if max_value is not None:
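For context (not part of the diff): a minimal usage sketch of the arguments documented above, assuming TensorFlow 2.x and the public tf.keras API. Inputs below threshold are scaled by negative_slope and inputs above max_value are clipped, and with the validation added here a negative (or None) threshold is rejected when the layer is constructed.

  import tensorflow as tf

  # All three arguments are expected to be floats >= 0 (max_value may also be None).
  layer = tf.keras.layers.ReLU(max_value=6.0, negative_slope=0.1, threshold=1.5)
  x = tf.constant([-3.0, 0.0, 2.0, 10.0])
  y = layer(x)  # inputs below the threshold are scaled by negative_slope,
                # inputs above max_value are clipped to max_value

  # With the checks above, a negative threshold now fails fast:
  try:
    tf.keras.layers.ReLU(threshold=-1.0)
  except ValueError as e:
    print(e)  # threshold of a ReLU layer cannot be a negative value. Got: -1.0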
60 changes: 46 additions & 14 deletions tensorflow/python/keras/layers/advanced_activations_test.py
@@ -78,21 +78,53 @@ def test_relu(self):
     # Test that we use `relu6` when appropriate in graph mode.
     self.assertTrue('Relu6' in keras.layers.ReLU(max_value=6)(x).name)

-  def test_relu_with_invalid_arg(self):
+  def test_relu_with_invalid_max_value(self):
     with self.assertRaisesRegex(
-        ValueError, 'max_value of Relu layer cannot be negative value: -10'):
-      testing_utils.layer_test(keras.layers.ReLU,
-                               kwargs={'max_value': -10},
-                               input_shape=(2, 3, 4),
-                               supports_masking=True)
+        ValueError, 'max_value of a ReLU layer cannot be a negative '
+        'value. Got: -10'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'max_value': -10},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
+  def test_relu_with_invalid_negative_slope(self):
+    with self.assertRaisesRegex(
+        ValueError, 'negative_slope of a ReLU layer cannot be a negative '
+        'value. Got: None'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'negative_slope': None},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
+    with self.assertRaisesRegex(
+        ValueError, 'negative_slope of a ReLU layer cannot be a negative '
+        'value. Got: -10'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'negative_slope': -10},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
+  def test_relu_with_invalid_threshold(self):
+    with self.assertRaisesRegex(
+        ValueError, 'threshold of a ReLU layer cannot be a negative '
+        'value. Got: None'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'threshold': None},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
     with self.assertRaisesRegex(
-        ValueError,
-        'negative_slope of Relu layer cannot be negative value: -2'):
-      with self.cached_session():
-        testing_utils.layer_test(
-            keras.layers.ReLU,
-            kwargs={'negative_slope': -2},
-            input_shape=(2, 3, 4))
+        ValueError, 'threshold of a ReLU layer cannot be a negative '
+        'value. Got: -10'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'threshold': -10},
+          input_shape=(2, 3, 4),
+          supports_masking=True)

   @keras_parameterized.run_with_all_model_types
   def test_layer_as_activation(self):
@@ -126,7 +126,7 @@ def test_leaky_elu_with_invalid_alpha(self):
           input_shape=(2, 3, 4),
           supports_masking=True)

-  def test_threshold_relu_with_invalid_alpha(self):
+  def test_threshold_relu_with_invalid_theta(self):
     with self.assertRaisesRegex(
         ValueError, 'Theta of a Thresholded ReLU layer cannot '
         'be None, requires a float. Got None'):
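For readers without the internal Keras test helpers (testing_utils, keras_parameterized), the following is a standalone sketch of the same assertions using plain unittest and the public tf.keras API; the expected messages are taken from the diff above, and assertRaisesRegex matches them as regular expressions against the raised ValueError.

  import unittest

  import tensorflow as tf


  class ReLUArgValidationTest(unittest.TestCase):

    def test_negative_max_value_raises(self):
      # Construction alone triggers the argument checks in ReLU.__init__.
      with self.assertRaisesRegex(
          ValueError, 'max_value of a ReLU layer cannot be a negative '
          'value. Got: -10'):
        tf.keras.layers.ReLU(max_value=-10)

    def test_none_threshold_raises(self):
      with self.assertRaisesRegex(
          ValueError, 'threshold of a ReLU layer cannot be a negative '
          'value. Got: None'):
        tf.keras.layers.ReLU(threshold=None)


  if __name__ == '__main__':
    unittest.main()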
