
Commit

fixed decorator bug
Aaron Tuor committed Sep 11, 2016
1 parent 6c41dd8 commit 5bbf1d6
Showing 5 changed files with 78 additions and 45 deletions.
106 changes: 70 additions & 36 deletions .idea/workspace.xml

Some generated files (.idea/workspace.xml) are not rendered by default.

4 changes: 2 additions & 2 deletions antk/core/node_ops.py
@@ -293,7 +293,7 @@ def convolutional_net(in_progress=None):
 @neural_net
 def residual_dnn(tensor_in, hidden_units, activation='tanh', distribution='tnorm',
                  initrange=1.0, l2=0.0, bn=False, keep_prob=None, fan_scaling=False,
-                 skiplayers=3, name='dnn'):
+                 skiplayers=3, name='residual_dnn'):
     """
     Creates residual neural network with shortcut connections.
     `Deep Residual Learning for Image Recognition`_
@@ -392,7 +392,7 @@ def highway_dnn(tensor_in, hidden_units, activation='tanh', distribution='tnorm'
     return tensor_in

 @node_op
-def dropout(tensor_in, prob, name=None):
+def dropout(tensor_in, prob, name='Dropout'):
     """
     Adds dropout node. Adapted from skflow `dropout_ops.py`_ .
     `Dropout A Simple Way to Prevent Neural Networks from Overfitting`_
Binary file modified antk/core/node_ops.pyc
Binary file not shown.
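For illustration, a hypothetical usage sketch of the new dropout default (not part of the commit; it assumes the antk.core.node_ops import path and the TF 1.x-era placeholder API current in 2016):

import tensorflow as tf
from antk.core import node_ops

x = tf.placeholder(tf.float32, [None, 10])
drop_default = node_ops.dropout(x, prob=0.5)              # falls back to the new default name='Dropout'
drop_named = node_ops.dropout(x, prob=0.5, name='drop1')  # an explicit name still overrides the default
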
13 changes: 6 additions & 7 deletions antk/lib/decorate.py
@@ -36,8 +36,7 @@ def new_function(*args, **kwargs):
         if defaults:
             keyword_args = merge_dict(defaults, keyword_args)
         if 'name' in keyword_args:
-            with tf.variable_scope(keyword_args['name']):
-                tensor_out = func(*args, **kwargs)
+            tensor_out = func(*args, **kwargs)

         def node_repr(tensor_node):
             return 'Tensor("%s", shape=%s, dtype=%r)' % (tensor_node.name,
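A simplified sketch of the decorator change above (illustrative only, not the antk source; merge_dict and the variable-scope call are taken from the hunk):

import functools

def node_op(func):
    """Illustrative stand-in for antk's node_op decorator."""
    @functools.wraps(func)
    def new_function(*args, **kwargs):
        # The real decorator first merges the function's keyword defaults into
        # its kwargs (merge_dict above) and checks for a 'name' entry.
        # Before this commit the op was then built inside a variable scope:
        #     with tf.variable_scope(kwargs['name']):
        #         tensor_out = func(*args, **kwargs)
        # After the fix the decorated op is called directly:
        tensor_out = func(*args, **kwargs)
        return tensor_out
    return new_function

Read together with the node_ops.py hunks, the more descriptive default name strings presumably now flow to the ops themselves rather than to an enclosing variable scope.
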
@@ -75,11 +74,11 @@ def tanhlecun(tensor_in, name='tanhlecun'):
     return 1.7159*tf.nn.tanh((2.0/3.0) * tensor_in)


-sigmoid = act(tf.nn.sigmoid)
-tanh = act(tf.nn.tanh)
-relu = act(tf.nn.relu)
-relu6 = act(tf.nn.relu6)
-softplus = act(tf.nn.softplus)
+sigmoid = tf.nn.sigmoid
+tanh = tf.nn.tanh
+relu = tf.nn.relu
+relu6 = tf.nn.relu6
+softplus = tf.nn.softplus

 ACTIVATION = {'sigmoid': sigmoid,
               'tanh': tanh,
Binary file modified antk/lib/decorate.pyc
Binary file not shown.
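A short usage sketch of the activation lookup after the change (illustrative; the table below is reconstructed from the hunk, which truncates the full ACTIVATION dict, and it assumes the TF 1.x-era API current when this commit landed):

import tensorflow as tf

# Activation names now map straight to the tf.nn callables rather than to
# act(...) wrappers; entries beyond 'softplus' are cut off in the diff above.
ACTIVATION = {'sigmoid': tf.nn.sigmoid,
              'tanh': tf.nn.tanh,
              'relu': tf.nn.relu,
              'relu6': tf.nn.relu6,
              'softplus': tf.nn.softplus}

x = tf.placeholder(tf.float32, [None, 8])
h = ACTIVATION['relu'](x)  # resolve an activation by name, as builders taking activation='tanh' presumably do
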
