Merge pull request #899 from pfnet/debug-mode
Debug mode
beam2d committed Mar 1, 2016
2 parents 795d26b + 9bb2f17 commit 7a33884
Showing 7 changed files with 164 additions and 1 deletion.
27 changes: 27 additions & 0 deletions chainer/__init__.py
@@ -29,4 +29,31 @@
OFF = flag.OFF
AUTO = flag.AUTO

_debug = False


def is_debug():
    """Get the debug mode.

    Returns:
        bool: ``True`` if Chainer is in debug mode.

    """
    return _debug


def set_debug(debug):
    """Set the debug mode.

    .. note::

        This method changes the global state. When you use this method in a
        multi-threaded environment, it may affect other threads.

    Args:
        debug (bool): New debug mode.

    """
    global _debug
    _debug = debug


basic_math.install_variable_arithmetics()
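
For context, a minimal usage sketch of the new API (not part of the diff; it uses only the two functions added above):

import chainer

chainer.set_debug(True)    # enable extra runtime checks globally
assert chainer.is_debug()
# ... build and run the model; debug-mode checks are now active ...
chainer.set_debug(False)   # restore normal mode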
18 changes: 18 additions & 0 deletions chainer/function.py
@@ -1,6 +1,8 @@
import os
import traceback
import weakref

import chainer
from chainer import cuda
from chainer import flag
from chainer.utils import type_check
@@ -98,13 +100,22 @@ def __call__(self, *inputs):
"""
in_data = tuple([x.data for x in inputs])
if chainer.is_debug():
self._stack = traceback.extract_stack()

if self.type_check_enable:
self._check_data_type_forward(in_data)
# Forward prop
with cuda.get_device(*in_data):
outputs = self.forward(in_data)
assert type(outputs) == tuple

if chainer.is_debug():
if any(cuda.get_array_module(out).isnan(out).any()
for out in outputs):
msg = 'NaN is detected on forward computation'
raise RuntimeError(msg)

out_v = flag.aggregate_flags([x.volatile for x in inputs])
ret = tuple([variable.Variable(y, volatile=out_v) for y in outputs])

@@ -132,6 +143,13 @@ def label(self):
"""
return self.__class__.__name__

@property
def stack(self):
if hasattr(self, '_stack'):
return self._stack
else:
return None

def _check_data_type_forward(self, in_data):
in_type = type_check.get_types(in_data, 'in_types', False)
try:
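
To illustrate the new forward-pass check, here is a sketch (not part of the diff) in the spirit of the tests added below; `NaNForward` is a hypothetical example class:

import numpy
import chainer

class NaNForward(chainer.Function):
    # hypothetical Function whose forward emits NaN
    def forward(self, inputs):
        return numpy.array([float('nan')], numpy.float32),

chainer.set_debug(True)
x = chainer.Variable(numpy.array([1], numpy.float32))
try:
    NaNForward()(x)
except RuntimeError as e:
    print(e)  # NaN is detected on forward computation
chainer.set_debug(False)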
20 changes: 19 additions & 1 deletion chainer/variable.py
@@ -1,19 +1,31 @@
import heapq
import traceback

import numpy

import chainer
from chainer import cuda
from chainer import flag


def _check_grad_type(func, x, gx):
    def make_message(message):
        if func:
            detail = '''Function {0} ({1}) has a bug.
            detail = 'Function `{0}` ({1}) has a bug.\n'.format(
                type(func).__name__, func.label)

            stack = func.stack
            if stack:
                detail += 'Stacktrace of the function is below:\n'
                for line in traceback.format_list(func._stack):
                    detail += line

            detail += '''
Please report this error to the issue tracker with the stack trace,
the information of your environment, and your script:
https://github.com/pfnet/chainer/issues/new.
'''.format(type(func).__name__, func.label)

        else:
            detail = ''

@@ -328,6 +340,12 @@ def add_cand(cand):
            gxs = func.backward(in_data, out_grad)
            assert len(gxs) == len(in_data)

            if chainer.is_debug():
                if any(cuda.get_array_module(gx).isnan(gx).any()
                       for gx in gxs):
                    msg = 'NaN is detected on backward computation'
                    raise RuntimeError(msg)

            if not retain_grad:
                for y in outputs:
                    if y is not None and y is not self:
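
A sketch (not part of the diff) of what the enriched error message buys you: with debug mode on, a Function that returns a wrong-shaped gradient is reported with its label and the stack trace recorded at call time. `BuggyFunction` is a hypothetical name:

import numpy
import chainer

class BuggyFunction(chainer.Function):
    # hypothetical Function with a deliberate backward bug
    def forward(self, inputs):
        return numpy.array([1], numpy.float32),

    def backward(self, inputs, grads):
        return numpy.array([1, 2], numpy.float32),  # shape mismatch

chainer.set_debug(True)  # Function.__call__ records the stack
x = chainer.Variable(numpy.array([1], numpy.float32))
y = BuggyFunction()(x)
y.backward()  # ValueError; message includes 'Stacktrace of the function'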
1 change: 1 addition & 0 deletions docs/source/reference/core.rst
@@ -12,4 +12,5 @@ Core functionalities
   core/link
   core/optimizer
   core/serializer
   core/debug
   core/function_set
13 changes: 13 additions & 0 deletions docs/source/reference/core/debug.rst
@@ -0,0 +1,13 @@
Debug mode
==========

In debug mode, Chainer checks the values of variables at runtime and shows
more detailed error messages.
It helps you debug your programs, at the cost of additional runtime overhead.


.. currentmodule:: chainer

.. autofunction:: is_debug
.. autofunction:: set_debug
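
A quick example of toggling the mode (a sketch based on the functions documented above; ``_debug`` defaults to ``False``):

>>> import chainer
>>> chainer.is_debug()
False
>>> chainer.set_debug(True)
>>> chainer.is_debug()
True
>>> chainer.set_debug(False)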
46 changes: 46 additions & 0 deletions tests/chainer_tests/test_function.py
@@ -316,4 +316,50 @@ def test_forward_invalid(self):
            f(v)


class TestFunctionDebug(unittest.TestCase):

    def setUp(self):
        chainer.set_debug(True)
        self.one = numpy.array([1], numpy.float32)
        self.nan = numpy.array([float('nan')], numpy.float32)
        self.f = chainer.Function()

    def tearDown(self):
        chainer.set_debug(False)

    def check_debug_forward(self, x_data):
        x = chainer.Variable(x_data)
        with self.assertRaises(RuntimeError):
            self.f(x)

    def test_debug_forward_cpu(self):
        self.f.forward_cpu = mock.MagicMock(return_value=(self.nan,))
        self.check_debug_forward(self.one)

    @attr.gpu
    def test_debug_forward_gpu(self):
        self.f.forward_gpu = mock.MagicMock(
            return_value=(cuda.to_gpu(self.nan),))
        self.check_debug_forward(cuda.to_gpu(self.one))

    def check_debug_backward(self, x_data):
        x = chainer.Variable(x_data)
        y = self.f(x)
        with self.assertRaises(RuntimeError):
            y.backward()

    def test_debug_backward_cpu(self):
        self.f.forward_cpu = mock.MagicMock(return_value=(self.one,))
        self.f.backward_cpu = mock.MagicMock(return_value=(self.nan,))
        self.check_debug_backward(self.one)

    @attr.gpu
    def test_debug_backward_gpu(self):
        self.f.forward_gpu = mock.MagicMock(
            return_value=(cuda.to_gpu(self.one),))
        self.f.backward_gpu = mock.MagicMock(
            return_value=(cuda.to_gpu(self.nan),))
        self.check_debug_backward(cuda.to_gpu(self.one))


testing.run_module(__name__, __file__)
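
The setUp/tearDown pairing above hints at a reusable pattern. A hedged sketch of an exception-safe toggle (a hypothetical helper, not part of Chainer's API at this commit):

import contextlib
import chainer

@contextlib.contextmanager
def debug_mode(debug=True):
    # hypothetical helper: restore the previous global state on exit
    old = chainer.is_debug()
    chainer.set_debug(debug)
    try:
        yield
    finally:
        chainer.set_debug(old)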
40 changes: 40 additions & 0 deletions tests/chainer_tests/test_variable.py
@@ -1,3 +1,4 @@
import inspect
import unittest

import numpy as np
@@ -523,4 +524,43 @@ def test_shape_mismatch_gpu(self):
        self.check_shape_mismatch(cuda.to_gpu(self.x))


class TestVariableBackwardErrorTraceback(unittest.TestCase):

    def setUp(self):
        self.x = np.array([1], np.float32)
        chainer.set_debug(True)

    def tearDown(self):
        chainer.set_debug(False)

    def check_traceback(self, x_data):
        xp = cuda.get_array_module(x_data)

        class DummyFunction(chainer.Function):
            label = 'dummy_function'

            def forward(self, inputs):
                return xp.array(1, np.float32),

            def backward(self, inputs, grads):
                return xp.array([1, 2], np.float32),

        x = chainer.Variable(x_data)
        line = inspect.currentframe().f_lineno + 1
        y = DummyFunction()(x)  # `line` is THIS line
        try:
            y.backward()
            self.fail()
        except ValueError as e:
            self.assertIn('Stacktrace', str(e))
            self.assertIn('line %d' % line, str(e))

    def test_traceback_cpu(self):
        self.check_traceback(self.x)

    @attr.gpu
    def test_traceback_gpu(self):
        self.check_traceback(cuda.to_gpu(self.x))


testing.run_module(__name__, __file__)
