Improve checks in keras.layers.ReLU #48646
- added a check that negative_slope is not None
- added a check that threshold is not negative
- updated the respective unit tests
- renamed test_threshold_relu_with_invalid_alpha to
  test_threshold_relu_with_invalid_theta
szutenberg committed Apr 26, 2021
1 parent 787ad8c commit 73bd1ec
Showing 2 changed files with 54 additions and 22 deletions.
16 changes: 8 additions & 8 deletions tensorflow/python/keras/layers/advanced_activations.py
@@ -411,14 +411,14 @@ class ReLU(Layer):
   def __init__(self, max_value=None, negative_slope=0, threshold=0, **kwargs):
     super(ReLU, self).__init__(**kwargs)
     if max_value is not None and max_value < 0.:
-      raise ValueError('max_value of Relu layer '
-                       'cannot be negative value: ' + str(max_value))
-    if negative_slope < 0.:
-      raise ValueError('negative_slope of Relu layer '
-                       'cannot be negative value: ' + str(negative_slope))
-    if threshold is None:
-      raise ValueError('threshold of Relu layer '
-                       'cannot be None. Required a float')
+      raise ValueError('max_value of a ReLU layer cannot be a negative '
+                       'value. Got: %s' % max_value)
+    if negative_slope is None or negative_slope < 0.:
+      raise ValueError('negative_slope of a ReLU layer cannot be a negative '
+                       'value. Got: %s' % negative_slope)
+    if threshold is None or threshold < 0.:
+      raise ValueError('threshold of a ReLU layer cannot be a negative '
+                       'value. Got: %s' % threshold)
 
     self.supports_masking = True
     if max_value is not None:
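For reference, a minimal sketch of the resulting behavior (my illustration, not part of the commit), assuming a TensorFlow build that includes this change:

    from tensorflow import keras

    # All three constructor arguments are now validated up front. Previously,
    # negative_slope=None raised an opaque TypeError from the `None < 0.`
    # comparison, and a negative threshold was accepted without complaint.
    for kwargs in ({'max_value': -10},
                   {'negative_slope': None},
                   {'negative_slope': -10},
                   {'threshold': None},
                   {'threshold': -10}):
      try:
        keras.layers.ReLU(**kwargs)
      except ValueError as e:
        # e.g. "max_value of a ReLU layer cannot be a negative value. Got: -10"
        print(e)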
60 changes: 46 additions & 14 deletions tensorflow/python/keras/layers/advanced_activations_test.py
@@ -78,21 +78,53 @@ def test_relu(self):
     # Test that we use `relu6` when appropriate in graph mode.
     self.assertTrue('Relu6' in keras.layers.ReLU(max_value=6)(x).name)
 
-  def test_relu_with_invalid_arg(self):
+  def test_relu_with_invalid_max_value(self):
     with self.assertRaisesRegex(
-        ValueError, 'max_value of Relu layer cannot be negative value: -10'):
-      testing_utils.layer_test(keras.layers.ReLU,
-                               kwargs={'max_value': -10},
-                               input_shape=(2, 3, 4),
-                               supports_masking=True)
+        ValueError, 'max_value of a ReLU layer cannot be a negative '
+        'value. Got: -10'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'max_value': -10},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
+  def test_relu_with_invalid_negative_slope(self):
+    with self.assertRaisesRegex(
+        ValueError, 'negative_slope of a ReLU layer cannot be a negative '
+        'value. Got: None'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'negative_slope': None},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
 
     with self.assertRaisesRegex(
-        ValueError,
-        'negative_slope of Relu layer cannot be negative value: -2'):
-      with self.cached_session():
-        testing_utils.layer_test(
-            keras.layers.ReLU,
-            kwargs={'negative_slope': -2},
-            input_shape=(2, 3, 4))
+        ValueError, 'negative_slope of a ReLU layer cannot be a negative '
+        'value. Got: -10'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'negative_slope': -10},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
+  def test_relu_with_invalid_threshold(self):
+    with self.assertRaisesRegex(
+        ValueError, 'threshold of a ReLU layer cannot be a negative '
+        'value. Got: None'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'threshold': None},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
+
+    with self.assertRaisesRegex(
+        ValueError, 'threshold of a ReLU layer cannot be a negative '
+        'value. Got: -10'):
+      testing_utils.layer_test(
+          keras.layers.ReLU,
+          kwargs={'threshold': -10},
+          input_shape=(2, 3, 4),
+          supports_masking=True)
 
   @keras_parameterized.run_with_all_model_types
   def test_layer_as_activation(self):
@@ -126,7 +126,7 @@ def test_leaky_elu_with_invalid_alpha(self):
           input_shape=(2, 3, 4),
           supports_masking=True)
 
-  def test_threshold_relu_with_invalid_alpha(self):
+  def test_threshold_relu_with_invalid_theta(self):
     with self.assertRaisesRegex(
         ValueError, 'Theta of a Thresholded ReLU layer cannot '
         'be None, requires a float. Got None'):
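A side note on the test idiom above (my illustration, not part of the commit): each expected message is written as adjacent string literals, which Python concatenates into a single string, and assertRaisesRegex matches it against the exception text with re.search, so these plain messages work as patterns. A self-contained demonstration:

    import unittest

    class RegexMatchDemo(unittest.TestCase):

      def test_message_is_matched_as_a_regex(self):
        # Adjacent literals concatenate, mirroring the style in the diff above.
        with self.assertRaisesRegex(
            ValueError, 'threshold of a ReLU layer cannot be a negative '
            'value. Got: -10'):
          raise ValueError('threshold of a ReLU layer cannot be a negative '
                           'value. Got: -10')

    if __name__ == '__main__':
      unittest.main()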
