
fix snippets
aymericdamien committed Feb 22, 2017
1 parent ec14cd3 commit 090dc5b
Showing 1 changed file with 5 additions and 3 deletions.
tflearn/activations.py (5 additions & 3 deletions)
@@ -184,8 +184,8 @@ def leaky_relu(x, alpha=0.1, name="LeakyReLU"):
 leakyrelu = leaky_relu
 
 
-def prelu(x, channel_shared=False, weights_init='zeros', restore=True,
-          trainable=True, reuse=False, scope=None, name="PReLU"):
+def prelu(x, channel_shared=False, weights_init='zeros', trainable=True,
+          restore=True, reuse=False, scope=None, name="PReLU"):
     """ PReLU.
 
     Parametric Rectified Linear Unit.
@@ -195,8 +195,10 @@ def prelu(x, channel_shared=False, weights_init='zeros', restore=True,
         `int16`, or `int8`.
         channel_shared: `bool`. Single weight is shared by all channels
         weights_init: `str`. Weights initialization. Default: zeros.
+        trainable: `bool`. If True, weights will be trainable.
         restore: `bool`. Restore or not alphas.
-        reuse:
+        reuse: `bool`. If True and 'scope' is provided, this layer variables
+            will be reused (shared).
         name: A name for this activation op (optional).
 
     Attributes:
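For context, PReLU computes f(x) = max(0, x) + alpha * min(0, x), where alpha is a learned weight per channel (or a single shared weight when channel_shared=True). Below is a minimal usage sketch of the reordered signature; it assumes TFLearn and TensorFlow are installed, and the layer sizes and the scope name "shared_prelu" are made up for illustration, not part of this commit.

    import tflearn
    from tflearn.activations import prelu

    # Build a small network and apply PReLU with the argument order
    # from this commit (trainable before restore).
    net = tflearn.input_data(shape=[None, 784])
    fc1 = tflearn.fully_connected(net, 128)
    fc2 = tflearn.fully_connected(net, 128)

    # The first call creates the PReLU weight (alpha) variable inside
    # the given scope.
    out1 = prelu(fc1, channel_shared=False, weights_init='zeros',
                 trainable=True, restore=True, scope="shared_prelu")
    # With reuse=True and the same scope, the weights are shared between
    # both branches, as the updated docstring describes.
    out2 = prelu(fc2, reuse=True, scope="shared_prelu")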
