Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

TF framework: add missing test cases to regularizers_test (#28542)

Merged
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
122 changes: 100 additions & 22 deletions tensorflow/python/keras/regularizers_test.py
Expand Up @@ -22,8 +22,9 @@
import numpy as np

from tensorflow.python import keras
from tensorflow.python.framework import test_util
from tensorflow.python.eager import context
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import regularizers
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
Expand All @@ -33,7 +34,8 @@
NUM_CLASSES = 2


class KerasRegularizersTest(keras_parameterized.TestCase,
                            parameterized.TestCase):

def create_model(self, kernel_regularizer=None, activity_regularizer=None):
model = keras.models.Sequential()
Expand All @@ -53,34 +55,49 @@ def get_data(self):
y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)
return (x_train, y_train), (x_test, y_test)

def create_multi_input_model_from(self, layer1, layer2):
input_1 = keras.layers.Input(shape=(DATA_DIM,))
input_2 = keras.layers.Input(shape=(DATA_DIM,))
out1 = layer1(input_1)
out2 = layer2(input_2)
out = keras.layers.Average()([out1, out2])
model = keras.models.Model([input_1, input_2], out)
model.add_loss(keras.backend.mean(out2))
model.add_loss(math_ops.reduce_sum(input_1))
return model

@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters([
('l1', keras.regularizers.l1()),
('l2', keras.regularizers.l2()),
('l1_l2', keras.regularizers.l1_l2()),
('l1', regularizers.l1()),
('l2', regularizers.l2()),
('l1_l2', regularizers.l1_l2()),
])
def test_kernel_regularization(self, regularizer):
with self.cached_session():
(x_train, y_train), _ = self.get_data()
model = self.create_model(kernel_regularizer=regularizer)
model.compile(loss='categorical_crossentropy', optimizer='sgd')
assert len(model.losses) == 1
model.fit(x_train, y_train, batch_size=10,
epochs=1, verbose=0)
(x_train, y_train), _ = self.get_data()
model = self.create_model(kernel_regularizer=regularizer)
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.assertEqual(len(model.losses), 1)
model.fit(x_train, y_train, batch_size=10, epochs=1, verbose=0)

@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters([
('l1', keras.regularizers.l1()),
('l2', keras.regularizers.l2()),
('l1', regularizers.l1()),
('l2', regularizers.l2()),
('l1_l2', regularizers.l1_l2()),
('l2_zero', keras.regularizers.l2(0.)),
])
@test_util.deprecated_graph_mode_only
def test_activity_regularization(self, regularizer):
with self.cached_session():
(x_train, y_train), _ = self.get_data()
model = self.create_model(activity_regularizer=regularizer)
model.compile(loss='categorical_crossentropy', optimizer='sgd')
assert len(model.losses) == 1
model.fit(x_train, y_train, batch_size=10,
epochs=1, verbose=0)
(x_train, y_train), _ = self.get_data()
model = self.create_model(activity_regularizer=regularizer)
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.assertEqual(len(model.losses),
1 if context.executing_eagerly() else 1)
model.fit(x_train, y_train, batch_size=10,
epochs=1, verbose=0)

@keras_parameterized.run_all_keras_modes
@keras_parameterized.run_with_all_model_types
Expand All @@ -105,6 +122,67 @@ def my_regularizer(weights):
model.get_config(), custom_objects={'my_regularizer': my_regularizer})
self.assertEqual(model2.layers[1].kernel_regularizer, my_regularizer)

@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters([
('l1', regularizers.l1()),
('l2', regularizers.l2()),
('l1_l2', regularizers.l1_l2()),
])
def test_regularization_shared_layer(self, regularizer):
dense_layer = keras.layers.Dense(NUM_CLASSES,
kernel_regularizer=regularizer,
activity_regularizer=regularizer)
model = self.create_multi_input_model_from(dense_layer, dense_layer)
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.assertEqual(len(model.losses), 5)

@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters([
('l1', regularizers.l1()),
('l2', regularizers.l2()),
('l1_l2', regularizers.l1_l2()),
])
def test_regularization_shared_model(self, regularizer):
dense_layer = keras.layers.Dense(NUM_CLASSES,
kernel_regularizer=regularizer,
activity_regularizer=regularizer)

input_tensor = keras.layers.Input(shape=(DATA_DIM,))
dummy_model = keras.models.Model(input_tensor, dense_layer(input_tensor))

model = self.create_multi_input_model_from(dummy_model, dummy_model)
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.assertEqual(len(model.losses), 6)

@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters([
('l1', regularizers.l1()),
('l2', regularizers.l2()),
('l1_l2', regularizers.l1_l2()),
])
def test_regularization_shared_layer_in_different_models(self, regularizer):
shared_dense = keras.layers.Dense(NUM_CLASSES,
kernel_regularizer=regularizer,
activity_regularizer=regularizer)
models = []
for _ in range(2):
input_tensor = keras.layers.Input(shape=(DATA_DIM,))
unshared_dense = keras.layers.Dense(NUM_CLASSES,
kernel_regularizer=regularizer)
out = unshared_dense(shared_dense(input_tensor))
models.append(keras.models.Model(input_tensor, out))

model = self.create_multi_input_model_from(layer1=models[0],
layer2=models[1])
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.assertEqual(len(model.losses), 14)


if __name__ == '__main__':
  # Runs every test case in this module via the TensorFlow test runner.
  test.main()