Errors fixed
vikramnitin9 committed Jul 12, 2017
commit 870e891 (1 parent: ed11d33)
Showing 3 changed files with 12 additions and 9 deletions.
theano/tensor/nnet/corr.py — 2 changes: 1 addition & 1 deletion

@@ -784,7 +784,7 @@ def make_node(self, kern, topgrad, shape=None):
         height_width = [as_tensor_variable(shape[0]).astype('int64'),
                         as_tensor_variable(shape[1]).astype('int64')]

-        broadcastable = [topgrad.type.broadcastable[0], kern.type.broadcastable[1],
+        broadcastable = [topgrad.type.broadcastable[0], kern.type.broadcastable[-3],
                          False, False]
         dtype = kern.type.dtype
         return Apply(self, [kern, topgrad] + height_width,
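
Why this one-index change matters: for a plain 4-D kernel the channel axis can be reached as either index 1 or index -3, but an unshared kernel gains extra leading output-position axes, so only the negative index keeps pointing at the channel axis. Below is a minimal illustrative sketch of that indexing, assuming a 6-D (nfilters, out_rows, out_cols, nchannels, krows, kcols) layout for unshared kernels; the shapes are made up for illustration and are not taken from the commit.

# Sketch only: indexing a kernel's axes with -3 vs. 1.
# Assumed layouts: 4-D (nfilters, nchannels, krows, kcols) and
# 6-D unshared (nfilters, out_rows, out_cols, nchannels, krows, kcols).
standard_kern_shape = (16, 3, 5, 5)
unshared_kern_shape = (16, 8, 8, 3, 5, 5)

assert standard_kern_shape[-3] == standard_kern_shape[1] == 3  # same channel axis either way
assert unshared_kern_shape[-3] == 3                            # still the channel axis
assert unshared_kern_shape[1] == 8                             # index 1 is an output-row axis here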
theano/tensor/nnet/opt.py — 2 changes: 1 addition & 1 deletion

@@ -200,6 +200,7 @@ def local_abstractconv_gradinputs_gemm(node):
                              subsample=node.op.subsample,
                              filter_dilation=node.op.filter_dilation,
                              unshared=node.op.unshared)(kern, topgrad, shape)
+
    copy_stack_trace(node.outputs[0], rval)

    return [rval]

@@ -448,7 +449,6 @@ def local_conv2d_gradinputs_cpu(node):
    if (not isinstance(node.op, AbstractConv2d_gradInputs) or
            node.inputs[0].dtype == 'float16'):
        return None
-
    kern, topgrad, shape = node.inputs

    if ((not isinstance(kern.type, TensorType) or
theano/tensor/nnet/tests/test_corr.py — 17 changes: 10 additions & 7 deletions

@@ -480,18 +480,21 @@ def test_gradweight(self):
         utt.assert_allclose(ref_val, unshared_val)

     def test_gradinput(self):
-        filters_val = np.random.random(self.kshp).astype(theano.config.floatX)
-        topgrad_val = np.random.random(self.topgrad_shape).astype(theano.config.floatX)
+        filters_val = np.random.random(self.kshp).astype('float32')
+        topgrad_val = np.random.random(self.topgrad_shape).astype('float32')

         conv_unshared = corr.CorrMM_gradInputs(unshared=True, subsample=self.sub,
                                                filter_dilation=self.dil)(self.filters, self.topgrad, self.imshp[-2:])
-        unshared_func = theano.function([self.filters, self.topgrad], conv_unshared)
+        unshared_func = theano.function([self.filters, self.topgrad], conv_unshared, mode="FAST_RUN")
         unshared_val = unshared_func(filters_val, topgrad_val)

-        conv_ref = theano.tensor.nnet.abstract_conv.conv2d_grad_wrt_inputs(self.topgrad, self.filters, self.imshp,
-                                                                           filter_flip=False, unshared=True,
-                                                                           subsample=self.sub, filter_dilation=self.dil)
-        ref_func = theano.function([self.topgrad, self.filters], conv_ref)
+        conv_ref = theano.tensor.nnet.abstract_conv.AbstractConv2d_gradInputs(self.imshp, self.kshp, border_mode="valid",
+                                                                              filter_flip=False, unshared=True,
+                                                                              subsample=self.sub,
+                                                                              filter_dilation=self.dil)(self.filters,
+                                                                                                        self.topgrad,
+                                                                                                        self.imshp[-2:])
+        ref_func = theano.function([self.topgrad, self.filters], conv_ref, mode=None)
         ref_val = ref_func(topgrad_val, filters_val)

         utt.assert_allclose(ref_val, unshared_val)
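
The updated test builds the reference result by applying AbstractConv2d_gradInputs directly instead of going through the conv2d_grad_wrt_inputs helper, and compiles the reference graph with mode=None (the default mode) so the abstract op can be replaced by the optimizer. The snippet below is a standalone sketch of that direct-application pattern for the ordinary, non-unshared case; the shapes and variable names are hypothetical and not taken from the test.

# Sketch only (hypothetical shapes): applying AbstractConv2d_gradInputs directly.
import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet.abstract_conv import AbstractConv2d_gradInputs

imshp = (2, 3, 8, 8)            # assumed (batch, channels, rows, cols)
kshp = (4, 3, 3, 3)             # assumed (nfilters, channels, krows, kcols)
topgrad_shape = (2, 4, 6, 6)    # valid mode, 3x3 kernel, stride 1

filters = T.tensor4('filters')
topgrad = T.tensor4('topgrad')

# Instantiate the op with the static shapes, then apply it to
# (filters, topgrad, spatial shape of the reconstructed input).
grad_inputs = AbstractConv2d_gradInputs(imshp, kshp, border_mode="valid",
                                        subsample=(1, 1),
                                        filter_flip=False)(filters, topgrad, imshp[-2:])

f = theano.function([topgrad, filters], grad_inputs, mode=None)
out = f(np.random.random(topgrad_shape).astype(theano.config.floatX),
        np.random.random(kshp).astype(theano.config.floatX))
print(out.shape)  # expected to match imshp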
