Skip to content

Commit

Permalink
Merge pull request #1065 from isacarnekvist/master
Browse files Browse the repository at this point in the history
Addition of a leaky rectifier (leaky ReLU) activation
  • Loading branch information
dmitriy-serdyuk committed Apr 20, 2016
2 parents 0c9a029 + dd4bc25 commit 8878d0f
Show file tree
Hide file tree
Showing 3 changed files with 41 additions and 6 deletions.
10 changes: 5 additions & 5 deletions blocks/bricks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,15 @@
from .interfaces import (Activation, Feedforward, Initializable, LinearLike,
Random)
from .simple import (Linear, Bias, Maxout, LinearMaxout, Identity, Tanh,
Logistic, Softplus, Rectifier, Softmax,
NDimensionalSoftmax)
Logistic, Softplus, Rectifier, LeakyRectifier,
Softmax, NDimensionalSoftmax)
from .sequences import Sequence, FeedforwardSequence, MLP
from .wrappers import WithExtraDims

__all__ = ('application', 'Brick', 'lazy', 'BatchNormalization',
'SpatialBatchNormalization', 'BatchNormalizedMLP',
'Activation', 'Feedforward', 'Initializable', 'LinearLike',
'Random', 'Linear', 'Bias', 'Maxout', 'LinearMaxout', 'Identity',
'Tanh', 'Logistic', 'Softplus', 'Rectifier', 'Softmax',
'NDimensionalSoftmax', 'Sequence', 'FeedforwardSequence',
'MLP', 'WithExtraDims')
'Tanh', 'Logistic', 'Softplus', 'Rectifier', 'LeakyRectifier',
'Softmax', 'NDimensionalSoftmax', 'Sequence',
'FeedforwardSequence', 'MLP', 'WithExtraDims')
26 changes: 26 additions & 0 deletions blocks/bricks/simple.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,6 +284,32 @@ def apply(self, input_):
return tensor.switch(input_ > 0, input_, 0)


class LeakyRectifier(Activation):
    r"""Leaky rectified linear unit (leaky ReLU) activation.

    Behaves like ``Rectifier`` except that negative inputs are scaled
    by a small constant instead of being clipped to zero.

    .. math:: f(x) = \text{max}(x, ax)

    Parameters
    ----------
    leak : float, optional
        The scalar to multiply negative values by. Named 'a' above.
        Defaults to 0.01.

    References
    ----------
    .. [1] Maas, Andrew L., Awni Y. Hannun, and Andrew Y. Ng. Rectifier
       nonlinearities improve neural network acoustic models. Proc.
       ICML. Vol. 30. 2013.

    """
    def __init__(self, leak=0.01, **kwargs):
        super(LeakyRectifier, self).__init__(**kwargs)
        # Kept as a private attribute; only read inside apply().
        self._leak = leak

    @application(inputs=['input_'], outputs=['output'])
    def apply(self, input_):
        # Positive inputs pass through unchanged; negative inputs are
        # attenuated by the leak factor rather than zeroed out.
        attenuated = self._leak * input_
        return tensor.switch(input_ > 0, input_, attenuated)


class Softmax(Brick):
"""A softmax brick.
Expand Down
11 changes: 10 additions & 1 deletion tests/bricks/test_bricks.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
from six.moves import cPickle

from blocks.bricks import (Identity, Linear, Maxout, LinearMaxout, MLP, Tanh,
Sequence, Random, Logistic, Softplus, Softmax)
Sequence, Random, Logistic, Softplus, Softmax,
LeakyRectifier)
from blocks.bricks.base import application, Brick, lazy, NoneAllocation
from blocks.bricks.parallel import Parallel, Fork
from blocks.filter import get_application_call, get_brick
Expand Down Expand Up @@ -339,6 +340,14 @@ def test_activations():
Softmax(x).apply(x).eval({x: x_val}).flatten(), rtol=1e-6)
assert_allclose(1.0 / (1.0 + numpy.exp(-x_val)),
Logistic(x).apply(x).eval({x: x_val}), rtol=1e-6)
leaky_out_1 = x_val - 0.5
leaky_out_1[leaky_out_1 < 0] *= 0.01
assert_allclose(leaky_out_1,
LeakyRectifier().apply(x).eval({x: x_val - 0.5}))
leaky_out_2 = x_val - 0.5
leaky_out_2[leaky_out_2 < 0] *= 0.05
assert_allclose(leaky_out_2,
LeakyRectifier(leak=0.05).apply(x).eval({x: x_val - 0.5}))


def test_mlp():
Expand Down

0 comments on commit 8878d0f

Please sign in to comment.