Preliminary support for exponential linear units.
alexjc committed Nov 26, 2015
1 parent e0c553c commit b29272a
Showing 3 changed files with 9 additions and 4 deletions.
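The unit added here is the exponential linear unit (ELU) with its scale parameter fixed at 1, as can be read off the explin helper below: f(x) = x for x >= 0 and f(x) = exp(x) - 1 for x < 0, so negative activations saturate smoothly toward -1 instead of being clipped to 0 as in a Rectifier.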
sknn/backend/lasagne/mlp.py (6 additions, 1 deletion)
@@ -28,6 +28,10 @@
 from ...nn import Layer, Convolution, ansi
 
 
+def explin(x):
+    return x * (x>=0) + (x<0) * (T.exp(x) - 1)
+
+
 class MultiLayerPerceptronBackend(BaseBackend):
     """
     Abstract base class for wrapping the multi-layer perceptron functionality
@@ -98,7 +102,8 @@ def _get_activation(self, l):
                           'Sigmoid': nl.sigmoid,
                           'Tanh': nl.tanh,
                           'Softmax': nl.softmax,
-                          'Linear': nl.linear}
+                          'Linear': nl.linear,
+                          'ExpLin': explin}
 
         assert l.type in nonlinearities,\
             "Layer type `%s` is not supported for `%s`." % (l.type, l.name)
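The explin helper computes the ELU elementwise: the mask x >= 0 passes non-negative inputs through unchanged, while negative inputs map to exp(x) - 1. A minimal NumPy sketch of the same expression (explin_np is a hypothetical name for illustration; the committed version uses Theano's T.exp so it operates on symbolic tensors):

import numpy as np

def explin_np(x):
    # Same piecewise form as explin above: identity for x >= 0,
    # exp(x) - 1 for x < 0 (the alpha = 1 ELU).
    x = np.asarray(x, dtype=float)
    return x * (x >= 0) + (x < 0) * (np.exp(x) - 1)

print(explin_np([-2.0, -0.5, 0.0, 1.5]))
# [-0.86466472 -0.39346934  0.          1.5       ]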
sknn/nn.py (1 addition, 1 deletion)
@@ -89,7 +89,7 @@ def __init__(
         assert warning is None,\
             "Specify layer parameters as keyword arguments, not positional arguments."
 
-        if type not in ['Rectifier', 'Sigmoid', 'Tanh', 'Linear', 'Softmax', 'Gaussian']:
+        if type not in ['Rectifier', 'Sigmoid', 'Tanh', 'Linear', 'Softmax', 'Gaussian', 'ExpLin']:
            raise NotImplementedError("Layer type `%s` is not implemented." % type)
 
         self.name = name
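Extending this whitelist is what makes "ExpLin" usable from the public API. A minimal usage sketch, assuming the usual sknn entry points (Regressor and Layer from sknn.mlp, the same classes the tests below alias as MLPR and L):

import numpy as np
from sknn.mlp import Regressor, Layer

# Hidden ExpLin (ELU) layer; the type name must now pass the
# check added in sknn/nn.py above.
nn = Regressor(
    layers=[
        Layer("ExpLin", units=8),
        Layer("Linear")],
    n_iter=10)

X = np.random.uniform(size=(64, 4))
y = np.random.uniform(size=(64, 1))
nn.fit(X, y)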
sknn/tests/test_deep.py (2 additions, 2 deletions)
@@ -23,6 +23,7 @@ def setUp(self):
             layers=[
                 L("Rectifier", units=16),
                 L("Sigmoid", units=12),
+                L("ExpLin", units=8),
                 L("Tanh", units=4),
                 L("Linear")],
             n_iter=1)
@@ -45,7 +46,7 @@ def setUp(self):
     def run_EqualityTest(self, copier, asserter):
         # Only PyLearn2 supports Maxout.
         extra = ["Maxout"] if sknn.backend.name == 'pylearn2' else []
-        for activation in ["Rectifier", "Sigmoid", "Tanh"] + extra:
+        for activation in ["Rectifier", "Sigmoid", "Tanh", "ExpLin"] + extra:
             nn1 = MLPR(layers=[L(activation, units=16, pieces=2), L("Linear", units=1)], random_state=1234)
             nn1._initialize(self.a_in, self.a_out)
 
@@ -105,4 +106,3 @@ def test_UnusedParameterWarning(self):
 
         assert_in('Parameter `pieces` is unused', self.buf.getvalue())
         self.buf = io.StringIO() # clear
-