Commit d874a2d
Merge pull request #3254 from aonotas/new-style-exp-func
New style exp function
unnonouno committed Aug 27, 2017
2 parents 3b0fe15 + 2f12358
Showing 3 changed files with 34 additions and 10 deletions.
10 changes: 6 additions & 4 deletions chainer/functions/math/exponential.py
@@ -2,11 +2,12 @@
 
 from chainer import cuda
 from chainer import function
+from chainer import function_node
 from chainer import utils
 from chainer.utils import type_check
 
 
-class Exp(function.Function):
+class Exp(function_node.FunctionNode):
 
     @property
     def label(self):
@@ -26,13 +27,14 @@ def forward_gpu(self, x):
         self.retain_outputs((0,))
         return cuda.cupy.exp(x[0]),
 
-    def backward(self, x, gy):
-        return utils.force_array(self.output_data[0] * gy[0]),
+    def backward(self, indexes, gy):
+        y = self.get_retained_outputs()[0]
+        return y * gy[0],
 
 
 def exp(x):
     """Elementwise exponential function."""
-    return Exp()(x)
+    return Exp().apply((x,))[0]
 
 
 class Log(function.Function):
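
For context, the practical difference between the old- and new-style APIs: a minimal sketch, assuming the Chainer v3-era chainer.grad helper and NumPy inputs (the variable names are illustrative, not part of the commit).

import numpy as np
import chainer
import chainer.functions as F

# New-style FunctionNodes are applied to a tuple of inputs and return a
# tuple of outputs; F.exp wraps this as Exp().apply((x,))[0].
x = chainer.Variable(np.array([0.0, 1.0], dtype=np.float32))
y = F.exp(x)

# backward() now returns Variables built from the retained output
# (d/dx exp(x) = exp(x)), so the gradient graph is itself
# differentiable and a second-order gradient can be taken.
gx, = chainer.grad([y], [x], enable_double_backprop=True)
ggx, = chainer.grad([gx], [x])
print(gx.data, ggx.data)  # both equal exp(x) here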
13 changes: 7 additions & 6 deletions tests/chainer_tests/function_hooks_tests/test_timer.py
@@ -20,6 +20,7 @@ def check_history(self, t, function_type, return_type):
 
 
 class SimpleLink(chainer.Link):
+
     def __init__(self):
         super(SimpleLink, self).__init__()
         with self.init_scope():
@@ -88,7 +89,7 @@ def setUp(self):
         self.gy = numpy.random.uniform(-0.1, 0.1, (3, 5)).astype(numpy.float32)
 
     def check_forward(self, x):
-        self.f(chainer.Variable(x))
+        self.f.apply((chainer.Variable(x),))
         self.assertEqual(1, len(self.h.call_history))
         check_history(self, self.h.call_history[0], functions.Exp, float)
 
@@ -101,7 +102,7 @@ def test_forward_gpu(self):
 
     def check_backward(self, x, gy):
         x = chainer.Variable(x)
-        y = self.f(x)
+        y = self.f.apply((x,))[0]
         y.grad = gy
         y.backward()
         self.assertEqual(2, len(self.h.call_history))
@@ -163,15 +164,15 @@ def setUp(self):
 
     def test_summary(self):
         x = self.x
-        self.f(chainer.Variable(x))
-        self.f(chainer.Variable(x))
+        self.f.apply((chainer.Variable(x),))
+        self.f.apply((chainer.Variable(x),))
         self.assertEqual(2, len(self.h.call_history))
         self.assertEqual(1, len(self.h.summary()))
 
     def test_print_report(self):
         x = self.x
-        self.f(chainer.Variable(x))
-        self.f(chainer.Variable(x))
+        self.f.apply((chainer.Variable(x),))
+        self.f.apply((chainer.Variable(x),))
         io = six.StringIO()
         self.h.print_report(file=io)
         expect = r'''\AFunctionName ElapsedTime Occurrence
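
As a usage note on what these hook tests exercise: a minimal sketch, assuming TimerHook's context-manager form and that summary() groups entries by function name (both as suggested by the assertions above).

import numpy as np
import chainer
import chainer.functions as F
from chainer import function_hooks

x = chainer.Variable(np.random.rand(3, 5).astype(np.float32))
with function_hooks.TimerHook() as hook:
    F.exp(x)  # first new-style application
    F.exp(x)  # second application of the same function
print(len(hook.call_history))  # 2: one entry per application
print(len(hook.summary()))     # 1: both calls share the name 'Exp'
hook.print_report()            # FunctionName / ElapsedTime / Occurrence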
21 changes: 21 additions & 0 deletions tests/chainer_tests/functions_tests/math_tests/test_exponential.py
@@ -42,6 +42,17 @@ def check_backward_cpu(self, op):
     def check_backward_gpu(self, op):
         self.check_backward(op, cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
 
+    def check_double_backward(self, op, x_data, y_grad, y_grad_grad):
+        gradient_check.check_double_backward(
+            op, x_data, y_grad, y_grad_grad, atol=1e-4, rtol=1e-3, dtype='d')
+
+    def check_double_backward_cpu(self, op):
+        self.check_double_backward(op, self.x, self.gy, self.ggy)
+
+    def check_double_backward_gpu(self, op):
+        self.check_double_backward(op, cuda.to_gpu(
+            self.x), cuda.to_gpu(self.gy), cuda.to_gpu(self.ggy))
+
     def check_label(self, op, expected):
         self.assertEqual(op().label, expected)
 
@@ -55,6 +66,7 @@ class TestExp(UnaryFunctionsTestBase):
     def make_data(self):
         x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
         gy = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
+        self.ggy = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
         return x, gy
 
     @condition.retry(3)
@@ -78,6 +90,15 @@ def test_backward_gpu(self):
     def test_label(self):
         self.check_label(F.Exp, 'exp')
 
+    @condition.retry(3)
+    def test_double_backward_cpu(self):
+        self.check_double_backward_cpu(F.exp)
+
+    @attr.gpu
+    @condition.retry(3)
+    def test_double_backward_gpu(self):
+        self.check_double_backward_gpu(F.exp)
+
 
 @testing.parameterize(*testing.product({
     'shape': [(3, 2), ()],
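
To show the new checks in isolation: a minimal sketch of gradient_check.check_double_backward with the same tolerances as the test above; the (3, 2) shape is taken from the test parameterization, and the array names are illustrative.

import numpy as np
import chainer.functions as F
from chainer import gradient_check

# check_double_backward numerically verifies the gradient of the
# gradient; for exp both derivatives are exp(x), so this exercises
# the retained-output path in the new backward().
x = np.random.uniform(-1, 1, (3, 2)).astype(np.float32)
gy = np.random.uniform(-1, 1, (3, 2)).astype(np.float32)   # output grad
ggx = np.random.uniform(-1, 1, (3, 2)).astype(np.float32)  # second-order seed
gradient_check.check_double_backward(
    F.exp, x, gy, ggx, atol=1e-4, rtol=1e-3, dtype='d')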
