[DARKNET FRONTEND]Batchnorm added as part of Dense op for running rnn model for next wo… (apache#1385)
siju-samuel authored and tqchen committed Jul 9, 2018
1 parent 6cdc18e commit 4e7b548
Showing 2 changed files with 29 additions and 6 deletions.
20 changes: 14 additions & 6 deletions nnvm/python/nnvm/frontend/darknet.py
@@ -226,13 +226,18 @@ def _darknet_dense(inputs, attrs):
     """Process the dense operation."""
     op_name, new_attrs = 'dense', {}
     new_attrs['units'] = _darknet_required_attr(attrs, 'num_hidden')
-
+    out_name = {}
     if attrs.get('use_bias', False) is True:
         new_attrs['use_bias'] = True
     if attrs.get('use_flatten', False) is True:
         inputs[0] = _sym.flatten(inputs[0])
     sym = _darknet_get_nnvm_op(op_name)(*inputs, **new_attrs)
-    out_name = sym.list_output_names()[0].replace('_output', '')
+    out_name[0] = sym.list_output_names()[0].replace('_output', '')
+    if 'use_batchNorm' in attrs:
+        op_name, new_attrs = 'batch_norm', {}
+        new_attrs['epsilon'] = 0.000001
+        sym = _darknet_get_nnvm_op(op_name)(*sym, **new_attrs)
+        out_name[1] = sym.list_output_names()[0].replace('_output', '')
     if 'activation' in attrs:
         new_attrs = {}
         new_attrs['activation'] = attrs['activation']
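
For context, the new branch above lowers a darknet connected layer with batch normalization into a dense op followed by a batch_norm op with epsilon=0.000001. A minimal numpy sketch of what that composition computes at inference time (not part of the patch; names such as scales, rolling_mean and rolling_variance mirror darknet's connected-layer fields, and the shapes are illustrative assumptions):

    import numpy as np

    def dense_batchnorm_reference(x, weights, biases, scales,
                                  rolling_mean, rolling_variance,
                                  epsilon=0.000001):
        """Reference for a connected layer with batch_normalize=1."""
        y = x.dot(weights.T)  # 'dense' without a bias add
        # Inference-mode batch_norm: normalize with the stored statistics,
        # scale by gamma (darknet's scales), then shift by beta.  Since the
        # patch loads the layer biases as beta, the bias add happens here.
        y_hat = (y - rolling_mean) / np.sqrt(rolling_variance + epsilon)
        return scales * y_hat + biases

    x = np.random.rand(1, 12).astype('float32')
    w = np.random.rand(2, 12).astype('float32')
    out = dense_batchnorm_reference(x, w,
                                    biases=np.zeros(2, 'float32'),
                                    scales=np.ones(2, 'float32'),
                                    rolling_mean=np.zeros(2, 'float32'),
                                    rolling_variance=np.ones(2, 'float32'))
    assert out.shape == (1, 2)
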
@@ -430,13 +435,16 @@ def _get_connected_weights(layer, opname, params, dtype):
     weights = _read_memory_buffer((layer.outputs, layer.inputs), layer.weights, dtype)
     biases = _read_memory_buffer((layer.outputs, ), layer.biases, dtype)
 
-    k = _get_tvm_params_name(opname, 'weight')
+    k = _get_tvm_params_name(opname[0], 'weight')
     params[k] = tvm.nd.array(weights)
-    k = _get_tvm_params_name(opname, 'bias')
-    params[k] = tvm.nd.array(biases)
 
     if layer.batch_normalize == 1 and layer.dontloadscales != 1:
-        _get_batchnorm_weights(layer, opname, params, layer.outputs, dtype)
+        _get_batchnorm_weights(layer, opname[1], params, layer.outputs, dtype)
+        k = _get_tvm_params_name(opname[1], 'beta')
+        params[k] = tvm.nd.array(biases)
+    else:
+        k = _get_tvm_params_name(opname[0], 'bias')
+        params[k] = tvm.nd.array(biases)
 
 def _get_batchnorm_weights(layer, opname, params, size, dtype):
     """Parse the weights for batchnorm, which includes, scales, moving mean
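
The out_name value returned by _darknet_dense is now a dict holding the dense op name at key 0 and the batch_norm op name at key 1, which is what _get_connected_weights indexes above. A rough sketch of the resulting params keys, assuming _get_tvm_params_name simply joins the op name and the argument name with an underscore (the batch_norm side, loaded by _get_batchnorm_weights, would follow NNVM's gamma/beta/moving_mean/moving_var naming):

    def _get_tvm_params_name(opname, arg_name):  # assumed helper behavior
        return opname + '_' + arg_name

    opname = {0: 'dense0', 1: 'batch_norm0'}  # shape of the new out_name dict
    print(_get_tvm_params_name(opname[0], 'weight'))  # dense0_weight
    print(_get_tvm_params_name(opname[1], 'beta'))    # batch_norm0_beta (gets the layer biases)
    print(_get_tvm_params_name(opname[0], 'bias'))    # dense0_bias (only without batchnorm)
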
15 changes: 15 additions & 0 deletions nnvm/tests/python/frontend/darknet/test_forward.py
@@ -169,6 +169,20 @@ def test_forward_dense():
     test_forward(net)
     LIB.free_network(net)
 
+def test_forward_dense_batchnorm():
+    '''test fully connected layer with batchnorm'''
+    net = LIB.make_network(1)
+    layer = LIB.make_connected_layer(1, 12, 2, 1, 1, 0)
+    for i in range(5):
+        layer.rolling_mean[i] = np.random.rand(1)
+        layer.rolling_variance[i] = np.random.rand(1)
+        layer.scales[i] = np.random.rand(1)
+    net.layers[0] = layer
+    net.w = net.h = 2
+    LIB.resize_network(net, 2, 2)
+    test_forward(net)
+    LIB.free_network(net)
+
 def test_forward_maxpooling():
     '''test maxpooling layer'''
     net = LIB.make_network(1)
@@ -264,6 +278,7 @@ def test_forward_elu():
     test_forward_batch_norm()
     test_forward_shortcut()
     test_forward_dense()
+    test_forward_dense_batchnorm()
     test_forward_reorg()
     test_forward_region()
     test_forward_elu()

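Taken together, the new test drives the whole path end to end. A sketch of the equivalent flow outside the test harness (assumes the LIB ctypes handle to libdarknet that test_forward.py sets up; the fifth argument of make_connected_layer is darknet's batch_normalize flag):

    import nnvm.frontend.darknet

    net = LIB.make_network(1)
    layer = LIB.make_connected_layer(1, 12, 2, 1, 1, 0)  # batch_normalize=1
    net.layers[0] = layer
    net.w = net.h = 2
    LIB.resize_network(net, 2, 2)

    # The connected layer now lowers to dense + batch_norm, and params
    # carries the batchnorm statistics alongside the dense weight.
    sym, params = nnvm.frontend.darknet.from_darknet(net, dtype='float32')
    LIB.free_network(net)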