Skip to content

Commit

Permalink
Compile
Browse files Browse the repository at this point in the history
  • Loading branch information
pchavanne committed Feb 27, 2017
1 parent 8b89078 commit 9124f52
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 1 deletion.
28 changes: 28 additions & 0 deletions tests/test_activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,34 @@ def test_get_activation():
actual = f(x_val)
desired = x_val * (x_val > 0) + alpha * x_val * (x_val < 0)
assert_allclose(actual, desired, rtol=1e-5)
x = T.matrix('x')
activation = yadll.activations.get_activation('relu')
f = theano.function([x], activation(x))
actual = f(x_val)
desired = x_val * (x_val > 0)
assert_allclose(actual, desired, rtol=1e-5)
x = T.matrix('x')
alpha = 0.5
activation = yadll.activations.get_activation(('relu', {'alpha': alpha}))
f = theano.function([x], activation(x))
actual = f(x_val)
desired = x_val * (x_val > 0) + alpha * x_val * (x_val < 0)
assert_allclose(actual, desired, rtol=1e-5)


def test_activation_to_conf():
    """Round-trip check: resolving an activation spec with get_activation()
    and serialising it back with activation_to_conf() must yield the
    canonical ('relu', kwargs) tuple, for all four accepted spec forms."""
    cases = [
        # (spec passed to get_activation, expected conf tuple)
        (yadll.activations.relu,                  ('relu', {})),
        ((yadll.activations.relu, {'alpha': 0.5}), ('relu', {'alpha': 0.5})),
        ('relu',                                   ('relu', {})),
        (('relu', {'alpha': 0.5}),                 ('relu', {'alpha': 0.5})),
    ]
    for spec, expected_conf in cases:
        activation = yadll.activations.get_activation(spec)
        assert yadll.activations.activation_to_conf(activation) == expected_conf


def test_linear():
Expand Down
12 changes: 11 additions & 1 deletion tests/test_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -556,8 +556,18 @@ def input_layer(self, input_data):
    def layer(self, lstm, input_layer):
        """Fixture: a plain 10-unit LSTM layer stacked on *input_layer*
        (no peepholes, no tied gates — the default configuration)."""
        return lstm(incoming=input_layer, n_units=10)

def test_get_output(self, layer):
    @pytest.fixture
    def peepholes_layer(self, lstm, input_layer):
        """Fixture: a 10-unit LSTM layer with peephole connections enabled
        (``peepholes=True``), otherwise identical to the default layer."""
        return lstm(incoming=input_layer, n_units=10, peepholes=True)

    @pytest.fixture
    def tied_layer(self, lstm, input_layer):
        """Fixture: a 10-unit LSTM layer built with ``tied_i_f=True``.

        NOTE(review): ``tied_i_f`` presumably ties the input and forget
        gate weights — confirm against the LSTM implementation in yadll.
        """
        return lstm(incoming=input_layer, n_units=10, tied_i_f=True)

def test_get_output(self, layer, peepholes_layer, tied_layer):
output = layer.get_output().eval()
output = peepholes_layer.get_output().eval()
output = tied_layer.get_output().eval()


class TestGRU:
Expand Down

0 comments on commit 9124f52

Please sign in to comment.