Skip to content

Commit

Permalink
Various tests parametrizations (#12017)
Browse files Browse the repository at this point in the history
* Remove a redundant test case whose code paths are already covered by the first test.

* parametrize convolutional_test

* parametrize advanced activations

* parametrize core

* parametrize embeddings

* fix linter

* keep auto doc test
  • Loading branch information
RaphaelMeudec authored and fchollet committed Jan 11, 2019
1 parent 5c934bd commit fd537c7
Show file tree
Hide file tree
Showing 4 changed files with 190 additions and 200 deletions.
26 changes: 12 additions & 14 deletions tests/keras/layers/advanced_activations_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,14 @@
from keras import backend as K


def test_leaky_relu():
    """Smoke-test LeakyReLU for zero, positive, and negative slopes."""
    for slope in (0., .5, -1.):
        layer_test(layers.LeakyReLU, kwargs={'alpha': slope},
                   input_shape=(2, 3, 4))
@pytest.mark.parametrize('activation_layer', [layers.LeakyReLU, layers.ELU])
@pytest.mark.parametrize('alpha', [0., .5, -1.])
def test_linear_unit_activations(activation_layer, alpha):
    """Check that LeakyReLU and ELU build and run for each alpha value."""
    layer_test(activation_layer,
               kwargs={'alpha': alpha},
               input_shape=(2, 3, 4))


def test_prelu():
Expand All @@ -20,21 +24,15 @@ def test_prelu_share():
input_shape=(2, 3, 4))


def test_elu():
    """Smoke-test ELU for zero, positive, and negative alpha values."""
    for coeff in (0., .5, -1.):
        layer_test(layers.ELU, kwargs={'alpha': coeff},
                   input_shape=(2, 3, 4))


def test_thresholded_relu():
    """Smoke-test ThresholdedReLU with a representative threshold."""
    config = {'theta': 0.5}
    layer_test(layers.ThresholdedReLU, kwargs=config,
               input_shape=(2, 3, 4))


def test_softmax():
    """Smoke-test Softmax along both the channel axis and the last axis."""
    for ax in (1, -1):
        layer_test(layers.Softmax, kwargs={'axis': ax},
                   input_shape=(2, 3, 4))
@pytest.mark.parametrize('axis', [1, -1])
def test_softmax(axis):
    """Smoke-test Softmax along the given axis."""
    config = {'axis': axis}
    layer_test(layers.Softmax, kwargs=config, input_shape=(2, 3, 4))


def test_relu():
Expand Down
Loading

0 comments on commit fd537c7

Please sign in to comment.