Commit 2ab0c3b
Unnecessary changes rolled back
vikramnitin9 committed Jul 12, 2017
1 parent 870e891 commit 2ab0c3b
Showing 2 changed files with 13 additions and 14 deletions.
2 changes: 1 addition & 1 deletion theano/tensor/nnet/opt.py
@@ -200,7 +200,6 @@ def local_abstractconv_gradinputs_gemm(node):
                              subsample=node.op.subsample,
                              filter_dilation=node.op.filter_dilation,
                              unshared=node.op.unshared)(kern, topgrad, shape)
-
     copy_stack_trace(node.outputs[0], rval)
 
     return [rval]
@@ -449,6 +448,7 @@ def local_conv2d_gradinputs_cpu(node):
     if (not isinstance(node.op, AbstractConv2d_gradInputs) or
             node.inputs[0].dtype == 'float16'):
         return None
+
     kern, topgrad, shape = node.inputs
 
     if ((not isinstance(kern.type, TensorType) or
25 changes: 12 additions & 13 deletions theano/tensor/nnet/tests/test_corr.py
@@ -455,7 +455,7 @@ def test_fwd(self):
         unshared_func = theano.function([self.input, self.filters], conv_unshared)
         unshared_val = unshared_func(inputs_val, filters_val)
 
-        conv_ref = theano.tensor.nnet.abstract_conv.conv2d(self.input, self.filters, filter_flip=False,
+        conv_ref = theano.tensor.nnet.abstract_conv.conv2d(self.input, self.filters, filter_flip=True,
                                                            unshared=True, subsample=self.sub, filter_dilation=self.dil)
         ref_func = theano.function([self.input, self.filters], conv_ref)
         ref_val = ref_func(inputs_val, filters_val)
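Note on the test_fwd change above: in Theano's conv2d, filter_flip=True performs true convolution (the kernel is flipped along both spatial axes before the sliding dot products), while filter_flip=False computes cross-correlation, which is the operation CorrMM implements directly. Below is a minimal sketch of that relationship, assuming a working Theano install; it uses ordinary shared convolution and made-up shapes, not the unshared case from the test:

import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d

x = T.tensor4('x')
w = T.tensor4('w')

# filter_flip=True: true convolution (kernel flipped on both spatial axes).
# filter_flip=False: cross-correlation, the operation CorrMM computes.
f_conv = theano.function([x, w], conv2d(x, w, filter_flip=True))
f_corr = theano.function([x, w], conv2d(x, w, filter_flip=False))

x_val = np.random.random((1, 1, 5, 5)).astype(theano.config.floatX)
w_val = np.random.random((1, 1, 3, 3)).astype(theano.config.floatX)

# Correlating with a 180-degree-flipped kernel is the same as convolving
# with the original kernel.
np.testing.assert_allclose(f_conv(x_val, w_val),
                           f_corr(x_val, w_val[:, :, ::-1, ::-1]),
                           rtol=1e-5)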
@@ -468,33 +468,32 @@ def test_gradweight(self):
 
         conv_unshared = corr.CorrMM_gradWeights(unshared=True, subsample=self.sub,
                                                 filter_dilation=self.dil)(self.input, self.topgrad, self.kshp[-2:])
-        unshared_func = theano.function([self.input, self.topgrad], conv_unshared)
+        unshared_func = theano.function([self.input, self.topgrad], conv_unshared, mode=self.mode)
         unshared_val = unshared_func(inputs_val, topgrad_val)
 
         conv_ref = theano.tensor.nnet.abstract_conv.conv2d_grad_wrt_weights(self.input, self.topgrad, self.kshp,
                                                                             filter_flip=False, unshared=True,
                                                                             subsample=self.sub, filter_dilation=self.dil)
-        ref_func = theano.function([self.input, self.topgrad], conv_ref)
+        ref_func = theano.function([self.input, self.topgrad], conv_ref,
+                                   mode=theano.compile.mode.Mode(optimizer='None'))
         ref_val = ref_func(inputs_val, topgrad_val)
 
         utt.assert_allclose(ref_val, unshared_val)
 
     def test_gradinput(self):
-        filters_val = np.random.random(self.kshp).astype('float32')
-        topgrad_val = np.random.random(self.topgrad_shape).astype('float32')
+        filters_val = np.random.random(self.kshp).astype(theano.config.floatX)
+        topgrad_val = np.random.random(self.topgrad_shape).astype(theano.config.floatX)
 
         conv_unshared = corr.CorrMM_gradInputs(unshared=True, subsample=self.sub,
                                                filter_dilation=self.dil)(self.filters, self.topgrad, self.imshp[-2:])
-        unshared_func = theano.function([self.filters, self.topgrad], conv_unshared, mode="FAST_RUN")
+        unshared_func = theano.function([self.filters, self.topgrad], conv_unshared, mode=self.mode)
         unshared_val = unshared_func(filters_val, topgrad_val)
 
-        conv_ref = theano.tensor.nnet.abstract_conv.AbstractConv2d_gradInputs(self.imshp, self.kshp, border_mode="valid",
-                                                                              filter_flip=False, unshared=True,
-                                                                              subsample=self.sub,
-                                                                              filter_dilation=self.dil)(self.filters,
-                                                                                                        self.topgrad,
-                                                                                                        self.imshp[-2:])
-        ref_func = theano.function([self.topgrad, self.filters], conv_ref, mode=None)
+        conv_ref = theano.tensor.nnet.abstract_conv.conv2d_grad_wrt_inputs(self.topgrad, self.filters, self.imshp,
+                                                                           filter_flip=False, unshared=True,
+                                                                           subsample=self.sub, filter_dilation=self.dil)
+        ref_func = theano.function([self.topgrad, self.filters], conv_ref,
+                                   mode=theano.compile.mode.Mode(optimizer='None'))
         ref_val = ref_func(topgrad_val, filters_val)
 
         utt.assert_allclose(ref_val, unshared_val)
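Two conventions recur in the rewritten tests above: the reference function is compiled with graph optimization disabled, presumably so the abstract conv op is evaluated as written rather than being rewritten into CorrMM, and test data is cast to theano.config.floatX rather than a hard-coded 'float32'. A minimal sketch of both, with a toy graph standing in for the conv ops (illustrative only, not from the test suite):

import numpy as np
import theano
import theano.tensor as T

x = T.vector('x')
y = x * 2 + 1

# Cast test data to the configured precision (float64 by default, or
# whatever THEANO_FLAGS/.theanorc sets) instead of hard-coding float32.
x_val = np.random.random(4).astype(theano.config.floatX)

# 'FAST_RUN' applies the full set of graph rewrites;
# Mode(optimizer='None') applies none, leaving the graph as written.
fast = theano.function([x], y, mode='FAST_RUN')
ref = theano.function([x], y, mode=theano.compile.mode.Mode(optimizer='None'))

np.testing.assert_allclose(fast(x_val), ref(x_val))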