Merge branch 'master' into new_snapshot
niboshi committed Apr 2, 2018
2 parents 9fe0f8b + 8511bda commit e153581
Showing 69 changed files with 505 additions and 287 deletions.
10 changes: 0 additions & 10 deletions chainer/__init__.py
@@ -7,26 +7,16 @@

from chainer import _version
from chainer import backends # NOQA
from chainer import configuration # NOQA
from chainer import dataset # NOQA
from chainer import datasets # NOQA
from chainer import function # NOQA
from chainer import function_hook # NOQA
from chainer import function_hooks # NOQA
from chainer import function_node # NOQA
from chainer import functions # NOQA
from chainer import initializer # NOQA
from chainer import initializers # NOQA
from chainer import iterators # NOQA
from chainer import link # NOQA
from chainer import links # NOQA
from chainer import optimizer # NOQA
from chainer import optimizers # NOQA
from chainer import reporter # NOQA
from chainer import serializer # NOQA
from chainer import serializers # NOQA
from chainer import training # NOQA
from chainer import variable # NOQA


# import class and function
3 changes: 1 addition & 2 deletions chainer/configuration.py
@@ -1,4 +1,3 @@
from __future__ import print_function
import contextlib
import sys
import threading
@@ -79,7 +78,7 @@ def _print_attrs(obj, keys, file):
max_len = max(len(key) for key in keys)
for key in keys:
spacer = ' ' * (max_len - len(key))
print(u'{} {}{}'.format(key, spacer, getattr(obj, key)), file=file)
file.write(u'{} {}{}\n'.format(key, spacer, getattr(obj, key)))


global_config = GlobalConfig()
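The hunk above drops from __future__ import print_function and replaces the print(..., file=file) call with a direct file.write(...), which keeps the output identical while removing the __future__ dependency. Reassembled from the lines shown above, the updated helper reads roughly as follows:

    def _print_attrs(obj, keys, file):
        # Pad each key so the attribute values line up in one column.
        max_len = max(len(key) for key in keys)
        for key in keys:
            spacer = ' ' * (max_len - len(key))
            # Write straight to the stream; with no print() call, no
            # `from __future__ import print_function` is needed on Python 2.
            file.write(u'{} {}{}\n'.format(key, spacer, getattr(obj, key)))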
8 changes: 1 addition & 7 deletions chainer/dataset/__init__.py
@@ -1,10 +1,4 @@
from chainer.dataset import convert # NOQA
from chainer.dataset import dataset_mixin # NOQA
from chainer.dataset import download # NOQA
from chainer.dataset import iterator # NOQA


# import class and function
# import classes and functions
from chainer.dataset.convert import concat_examples # NOQA
from chainer.dataset.convert import ConcatWithAsyncTransfer # NOQA
from chainer.dataset.convert import to_device # NOQA
21 changes: 7 additions & 14 deletions chainer/dataset/download.py
@@ -1,13 +1,13 @@
from __future__ import print_function
import hashlib
import os
import shutil
import sys
import tempfile

import filelock
from six.moves.urllib import request

from chainer import utils


_dataset_root = os.environ.get('CHAINER_DATASET_ROOT',
os.path.expanduser('~/.chainer/dataset'))
@@ -101,15 +101,12 @@ def cached_download(url):
if os.path.exists(cache_path):
return cache_path

temp_root = tempfile.mkdtemp(dir=cache_root)
try:
with utils.tempdir(dir=cache_root) as temp_root:
temp_path = os.path.join(temp_root, 'dl')
print('Downloading from {}...'.format(url), file=sys.stderr)
sys.stderr.write('Downloading from {}...\n'.format(url))
request.urlretrieve(url, temp_path)
with filelock.FileLock(lock_path):
shutil.move(temp_path, cache_path)
finally:
shutil.rmtree(temp_root)

return cache_path

@@ -141,10 +138,6 @@ def cache_or_load_file(path, creator, loader):
if os.path.exists(path):
return loader(path)

file_name = os.path.basename(path)
temp_dir = tempfile.mkdtemp()
temp_path = os.path.join(temp_dir, file_name)

try:
os.makedirs(_dataset_root)
except OSError:
@@ -153,12 +146,12 @@ def cache_or_load_file(path, creator, loader):

lock_path = os.path.join(_dataset_root, '_create_lock')

try:
with utils.tempdir() as temp_dir:
file_name = os.path.basename(path)
temp_path = os.path.join(temp_dir, file_name)
content = creator(temp_path)
with filelock.FileLock(lock_path):
if not os.path.exists(path):
shutil.move(temp_path, path)
finally:
shutil.rmtree(temp_dir)

return content
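Both hunks in this file swap the manual tempfile.mkdtemp() / try ... finally: shutil.rmtree(...) pattern for a chainer.utils.tempdir() context manager, so the temporary directory is removed even when the download or the creator() call raises. The helper itself is not part of this diff; the following is only a sketch of what such a context manager might look like, not the actual chainer.utils implementation:

    import contextlib
    import shutil
    import tempfile


    @contextlib.contextmanager
    def tempdir(**kwargs):
        # Assumed behaviour: create a temporary directory and guarantee its
        # removal when the with-block exits, even if an exception was raised.
        path = tempfile.mkdtemp(**kwargs)
        try:
            yield path
        finally:
            shutil.rmtree(path, ignore_errors=True)

Inside the with-block, cached_download still moves the finished file into the cache under filelock.FileLock, exactly as before.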
14 changes: 1 addition & 13 deletions chainer/datasets/__init__.py
@@ -1,16 +1,4 @@
from chainer.datasets import cifar # NOQA
from chainer.datasets import dict_dataset # NOQA
from chainer.datasets import fashion_mnist # NOQA
from chainer.datasets import image_dataset # NOQA
from chainer.datasets import mnist # NOQA
from chainer.datasets import ptb # NOQA
from chainer.datasets import sub_dataset # NOQA
from chainer.datasets import svhn # NOQA
from chainer.datasets import transform_dataset # NOQA
from chainer.datasets import tuple_dataset # NOQA


# import class and function
# import classes and functions
from chainer.datasets.cifar import get_cifar10 # NOQA
from chainer.datasets.cifar import get_cifar100 # NOQA
from chainer.datasets.concatenated_dataset import ConcatenatedDataset # NOQA
8 changes: 1 addition & 7 deletions chainer/function_hooks/__init__.py
@@ -1,10 +1,4 @@
from chainer.function_hooks import cuda_profile # NOQA
from chainer.function_hooks import cupy_memory_profile # NOQA
from chainer.function_hooks import debug_print # NOQA
from chainer.function_hooks import timer # NOQA


# import class and function
# import classes and functions
from chainer.function_hooks.cuda_profile import CUDAProfileHook # NOQA
from chainer.function_hooks.cupy_memory_profile import CupyMemoryProfileHook # NOQA
from chainer.function_hooks.debug_print import PrintHook # NOQA
3 changes: 1 addition & 2 deletions chainer/function_hooks/debug_print.py
@@ -1,4 +1,3 @@
from __future__ import print_function
import sys
import warnings

@@ -60,7 +59,7 @@ def __init__(self, sep=None, end='\n', file=sys.stdout, flush=True):
self.flush = flush

def _print(self, msg):
print(msg, end=self.end, file=self.file)
self.file.write(msg + self.end)

def _process(self, function, in_data, out_grad=None):
self._print('function\t{}'.format(function.label))
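As in configuration.py, the print() call in _print is replaced with a direct write to self.file, removing the last use of the __future__ import. How the hook is used does not change; a small usage sketch with made-up data (output goes to sys.stdout by default):

    import numpy as np

    import chainer
    import chainer.functions as F

    x = chainer.Variable(np.array([1.0, 2.0, 3.0], dtype=np.float32))

    # PrintHook writes each function's label and input/output data through
    # its _print helper during the forward and backward passes.
    with chainer.function_hooks.PrintHook():
        y = F.exp(x)
        y.grad = np.ones_like(x.data)
        y.backward()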
20 changes: 20 additions & 0 deletions chainer/function_node.py
@@ -891,8 +891,28 @@ def _backprop(outputs, inputs, grad_required, retain_grad, grads, loss_scale):
gys = tuple([gy if not isinstance(gy, tuple) else
chainer.functions.add(*gy)
for gy in gys])

# Call pre-backward hooks
hooks = chainer.get_function_hooks()
if func._n_local_function_hooks != 0:
hooks = collections.OrderedDict(hooks)
hooks.update(func.local_function_hooks)
hooks = hooks.values() # avoid six for performance

in_data = tuple([x.data for x in func.inputs])
out_grad_data = tuple(
[None if g is None else g.data for g in gys])
cuda.get_device_from_array(*in_data).use()

for hook in hooks:
hook.backward_preprocess(func, in_data, out_grad_data)

new_gxs = func.backward_accumulate(input_indexes, gys, gxs)

# Call post-backward hooks
for hook in hooks:
hook.backward_postprocess(func, in_data, out_grad_data)

# Delete output gradients that are not required to return
for y_ref in func.outputs:
y = y_ref()
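The added block gathers the registered function hooks (the global ones, merged with any hooks local to the function when there are any) and calls their backward_preprocess and backward_postprocess methods around backward_accumulate, so function hooks now also observe the backward pass driven by _backprop. A minimal sketch of a custom hook that would receive these calls, as a hypothetical example rather than anything in this commit:

    import chainer


    class CountBackwardHook(chainer.FunctionHook):
        """Hypothetical hook counting backward invocations per function label."""

        name = 'CountBackwardHook'

        def __init__(self):
            self.counts = {}

        def backward_preprocess(self, function, in_data, out_grad):
            # Called just before function.backward_accumulate() in _backprop.
            self.counts.setdefault(function.label, 0)

        def backward_postprocess(self, function, in_data, out_grad):
            # Called right after this function's gradients are accumulated.
            self.counts[function.label] += 1

Such a hook fires either when enabled globally (with CountBackwardHook(): ...) or when added to a single function via add_hook, which is what local_function_hooks in the new block refers to.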
2 changes: 1 addition & 1 deletion chainer/functions/__init__.py
@@ -311,4 +311,4 @@
from chainer.functions.util.forget import Forget # NOQA

# Aliases
mean = average
from chainer.functions.math.average import average as mean # NOQA
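The module-level alias mean = average is replaced by an explicit import ... as mean with a NOQA marker, so the alias is an ordinary import rather than an assignment that relies on average already being bound in the package namespace at that point. Calling code is unaffected; for example:

    import numpy as np

    import chainer.functions as F

    x = np.arange(6, dtype=np.float32).reshape(2, 3)

    # F.mean is an alias of chainer.functions.math.average.average.
    y = F.mean(x, axis=0)
    print(y)  # roughly: variable([1.5 2.5 3.5])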
3 changes: 1 addition & 2 deletions chainer/functions/connection/deconvolution_2d.py
@@ -122,8 +122,7 @@ def forward_cpu(self, inputs):
# Grouped convolution implementation
return self._forward_grouped_convolution(x, W, b)

elif ((self.dy == 1 and self.dx == 1)
and intel64.should_use_ideep('>=auto')
elif (intel64.should_use_ideep('>=auto')
and intel64.inputs_all_ready(inputs)):
# iDeep implementation
self._use_ideep = True
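The updated condition drops the dilation guard (self.dy == 1 and self.dx == 1), so on CPU the iDeep implementation is now selected purely from the iDeep configuration and the readiness of the input arrays. A simplified sketch of that check, pulled out into a standalone helper with a hypothetical name for illustration:

    from chainer.backends import intel64


    def _use_ideep_for_deconv(inputs):
        # Mirrors the new dispatch condition: only the global iDeep setting
        # ('>=auto') and whether all inputs are iDeep-ready arrays matter;
        # the dilation factors no longer gate the iDeep path.
        return (intel64.should_use_ideep('>=auto')
                and intel64.inputs_all_ready(inputs))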
