
Commit

shorten all layers name
zsdonghao committed Dec 4, 2018
1 parent 6706764 commit 9e80b6d
Showing 22 changed files with 685 additions and 309 deletions.
1 change: 1 addition & 0 deletions tensorlayer/layers/__init__.py
@@ -14,6 +14,7 @@
from .core import *
from .dense import *
from .dropout import *
from .deprecated import *
from .extend import *
from .flow_control import *
from .image_resampling import *
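The new `from .deprecated import *` line suggests that the old, longer layer names stay importable from a compatibility module. The contents of `tensorlayer/layers/deprecated.py` are not shown in this excerpt, so the following is only a hypothetical sketch of what such a shim could look like; the alias names are taken from the renames below, everything else is assumed.

from .activation import PRelu, PRelu6, PTRelu6

# Hypothetical aliases only: keep the pre-rename names importable so that code
# written as tl.layers.PReluLayer(...) keeps working after this commit. The real
# module presumably also emits a DeprecationWarning when an old name is used.
PReluLayer = PRelu
PRelu6Layer = PRelu6
PTRelu6Layer = PTRelu6

__all__ = ['PReluLayer', 'PRelu6Layer', 'PTRelu6Layer']
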
30 changes: 15 additions & 15 deletions tensorlayer/layers/activation.py
@@ -14,15 +14,15 @@
from tensorlayer.decorators import deprecated_alias

__all__ = [
'PReluLayer',
'PRelu6Layer',
'PTRelu6Layer',
'PRelu',
'PRelu6',
'PTRelu6',
]


class PReluLayer(Layer):
class PRelu(Layer):
"""
The :class:`PReluLayer` class is a Parametric Rectified Linear layer.
The :class:`PRelu` class is a Parametric Rectified Linear layer.
Parameters
----------
@@ -50,7 +50,7 @@ def __init__(
a_init_args=None, name="PReluLayer"
):

super(PReluLayer,
super(PRelu,
self).__init__(prev_layer=prev_layer, act=tf.nn.leaky_relu, a_init_args=a_init_args, name=name)

if channel_shared:
@@ -79,11 +79,11 @@ def __init__(
self._add_params(alpha_var)


class PRelu6Layer(Layer):
class PRelu6(Layer):
"""
The :class:`PRelu6Layer` class is a Parametric Rectified Linear layer integrating ReLU6 behaviour.
The :class:`PRelu6` class is a Parametric Rectified Linear layer integrating ReLU6 behaviour.
This Layer is a modified version of the :class:`PReluLayer`.
This Layer is a modified version of the :class:`PRelu`.
This activation layer uses a modified version of :func:`tl.act.leaky_relu` introduced by the following paper:
`Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
@@ -125,7 +125,7 @@ def __init__(
a_init_args=None, name="PReLU6_layer"
):

super(PRelu6Layer, self).__init__(prev_layer=prev_layer, act=leaky_relu6, a_init_args=a_init_args, name=name)
super(PRelu6, self).__init__(prev_layer=prev_layer, act=leaky_relu6, a_init_args=a_init_args, name=name)

if channel_shared:
w_shape = (1, )
@@ -153,11 +153,11 @@ def __init__(
self._add_params(alpha_var)


class PTRelu6Layer(Layer):
class PTRelu6(Layer):
"""
The :class:`PTRelu6Layer` class is a Parametric Rectified Linear layer integrating ReLU6 behaviour.
The :class:`PTRelu6` class is a Parametric Rectified Linear layer integrating ReLU6 behaviour.
This Layer is a modified version of the :class:`PReluLayer`.
This Layer is a modified version of the :class:`PRelu`.
This activation layer uses a modified version of :func:`tl.act.leaky_relu` introduced by the following paper:
`Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
@@ -172,7 +172,7 @@ class PTRelu6Layer(Layer):
- When x in [0, 6]: ``f(x) = x``.
- When x > 6: ``f(x) = 6 + (alpha_high * (x-6))``.
This version goes one step beyond :class:`PRelu6Layer` by introducing leaky behaviour on the positive side when x > 6.
This version goes one step beyond :class:`PRelu6` by introducing leaky behaviour on the positive side when x > 6.
Parameters
----------
@@ -201,7 +201,7 @@ def __init__(
a_init_args=None, name="PTReLU6_layer"
):

super(PTRelu6Layer,
super(PTRelu6,
self).__init__(prev_layer=prev_layer, act=leaky_twice_relu6, a_init_args=a_init_args, name=name)

if channel_shared:
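The PRelu6/PTRelu6 docstrings above describe the activation piecewise: identity on [0, 6] and, for PTRelu6, a second leaky slope `alpha_high` above 6. A minimal NumPy sketch of that rule, independent of TensorLayer: the behaviour below zero is assumed to be the usual parametric leaky branch, and the fixed example slopes stand in for the layers' trainable per-channel parameters.

import numpy as np

def ptrelu6_reference(x, alpha_low=0.2, alpha_high=0.1):
    # Sketch of the piecewise rule quoted in the PTRelu6 docstring:
    #   x in [0, 6] -> x
    #   x > 6       -> 6 + alpha_high * (x - 6)
    #   x < 0       -> alpha_low * x   (assumed: standard parametric leaky branch;
    #                  in the real layer, alpha_low/alpha_high are trainable)
    return np.where(x < 0, alpha_low * x,
                    np.where(x <= 6.0, x, 6.0 + alpha_high * (x - 6.0)))

print(ptrelu6_reference(np.array([-2.0, 3.0, 8.0])))  # [-0.4  3.   6.2]
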

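Taken together, the commit only shortens class names; the constructor arguments visible in the diff (`prev_layer`, `channel_shared`, `a_init_args`, `name`) are unchanged. A hedged before/after sketch of user code, using only the arguments shown above:

import tensorlayer as tl

# After this commit the shortened class is the one exposed under tl.layers:
print(tl.layers.PRelu)

# Constructing it is unchanged apart from the name. Previously:
#   prelu = tl.layers.PReluLayer(prev_layer=net, channel_shared=False, name="prelu_1")
# Now:
#   prelu = tl.layers.PRelu(prev_layer=net, channel_shared=False, name="prelu_1")
# where `net` is whatever layer feeds the activation (not built in this sketch).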