Merge remote-tracking branch 'upstream/master' into disable-npz-pickle
kmaehashi committed May 29, 2018
2 parents c94f74a + 03b9431 commit 7f61488
Showing 232 changed files with 7,209 additions and 1,252 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -11,8 +11,8 @@
[**Website**](https://chainer.org/)
| [**Docs**](https://docs.chainer.org/en/stable/)
| [**Install Guide**](https://docs.chainer.org/en/stable/install.html)
| [**Tutorial**](https://docs.chainer.org/en/stable/tutorial/)
| **Examples** ([Official](https://github.com/chainer/chainer/tree/master/examples), [External](https://github.com/chainer/chainer/wiki/External-examples))
| [**Tutorial**](https://docs.chainer.org/en/stable/guides/)
| **Examples** ([Official](https://github.com/chainer/chainer/tree/master/examples), [External](https://github.com/chainer-community/awesome-chainer))

**Forum** ([en](https://groups.google.com/forum/#!forum/chainer), [ja](https://groups.google.com/forum/#!forum/chainer-jp))
| **Slack invitation** ([en](https://bit.ly/join-chainer-slack), [ja](https://bit.ly/join-chainer-jp-slack))
@@ -26,7 +26,7 @@ For more details of Chainer, see the documents and resources listed above and jo

## Stable version

The stable version of Chainer is maintained in a separate branch: [v3](https://github.com/chainer/chainer/tree/v3).
The stable version of Chainer is maintained in a separate branch: [v4](https://github.com/chainer/chainer/tree/v4).

## Installation

12 changes: 2 additions & 10 deletions chainer/__init__.py
@@ -7,30 +7,21 @@

from chainer import _version
from chainer import backends # NOQA
from chainer import configuration # NOQA
from chainer import dataset # NOQA
from chainer import datasets # NOQA
from chainer import function # NOQA
from chainer import function_hook # NOQA
from chainer import function_hooks # NOQA
from chainer import function_node # NOQA
from chainer import functions # NOQA
from chainer import initializer # NOQA
from chainer import initializers # NOQA
from chainer import iterators # NOQA
from chainer import link # NOQA
from chainer import links # NOQA
from chainer import optimizer # NOQA
from chainer import optimizers # NOQA
from chainer import reporter # NOQA
from chainer import serializer # NOQA
from chainer import serializers # NOQA
from chainer import training # NOQA
from chainer import variable # NOQA


# import class and function
# These functions from backends.cuda are kept for backward compatibility
from chainer._runtime_info import print_runtime_info # NOQA
from chainer.backends.cuda import should_use_cudnn # NOQA
from chainer.backends.cuda import should_use_cudnn_tensor_core # NOQA
from chainer.configuration import config # NOQA
@@ -58,6 +49,7 @@
from chainer.reporter import report_scope # NOQA
from chainer.reporter import Reporter # NOQA
from chainer.reporter import Summary # NOQA
from chainer.sequential import Sequential # NOQA
from chainer.serializer import AbstractSerializer # NOQA
from chainer.serializer import Deserializer # NOQA
from chainer.serializer import Serializer # NOQA
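Among the newly exported names above is ``Sequential`` (from ``chainer.sequential``). A hedged sketch of how the new top-level export might be used; the layer sizes here are made up for the example:

```python
import chainer
import chainer.functions as F
import chainer.links as L

# Chain callables in order; Linear(None, ...) infers the input size
# from the first forward pass.
model = chainer.Sequential(
    L.Linear(None, 256),
    F.relu,
    L.Linear(256, 10),
)
```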
46 changes: 46 additions & 0 deletions chainer/_runtime_info.py
@@ -0,0 +1,46 @@
import sys

import numpy
import six

import chainer
from chainer.backends import cuda


class _RuntimeInfo(object):

chainer_version = None
numpy_version = None
cuda_info = None

def __init__(self):
self.chainer_version = chainer.__version__
self.numpy_version = numpy.__version__
if cuda.available:
self.cuda_info = cuda.cupyx.get_runtime_info()
else:
self.cuda_info = None

def __str__(self):
s = six.StringIO()
s.write('''Chainer: {}\n'''.format(self.chainer_version))
s.write('''NumPy: {}\n'''.format(self.numpy_version))
if self.cuda_info is None:
s.write('''CuPy: Not Available\n''')
else:
s.write('''CuPy:\n''')
for line in str(self.cuda_info).splitlines():
s.write(''' {}\n'''.format(line))
return s.getvalue()


def get_runtime_info():
return _RuntimeInfo()


def print_runtime_info(out=None):
if out is None:
out = sys.stdout
out.write(str(get_runtime_info()))
if hasattr(out, 'flush'):
out.flush()
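Since ``print_runtime_info`` is also re-exported at the package top level (see the ``chainer/__init__.py`` hunk above), it can be called directly. A minimal usage sketch:

```python
import io

import chainer

# Write the report (Chainer/NumPy versions, plus CuPy info if available)
# to stdout.
chainer.print_runtime_info()

# Or capture it in a string via the ``out`` argument.
buf = io.StringIO()
chainer.print_runtime_info(out=buf)
report = buf.getvalue()
```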
2 changes: 1 addition & 1 deletion chainer/_version.py
@@ -1 +1 @@
__version__ = '4.0.0rc1'
__version__ = '5.0.0b1'
10 changes: 9 additions & 1 deletion chainer/backends/cuda.py
@@ -168,8 +168,16 @@ def get_device_from_array(*arrays):
The device on which the given CuPy array resides is returned.
.. note::
This method only recognizes :class:`cupy.ndarray`\\ s in arguments.
In particular, unlike :func:`get_array_module`, this method
does not recognize :class:`~chainer.Variable` objects.
If you need to get the device from a :class:`~chainer.Variable` instance
``v``, use ``get_device_from_array(v.array)``.
Args:
array (cupy.ndarray or list of cupy.ndarray):
arrays (:class:`cupy.ndarray` or list of :class:`cupy.ndarray`):
The array whose device this function returns. If a list of
:class:`cupy.ndarray`\\ s is given, the device of the first
CuPy array in the list is returned.
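A short sketch of the behavior described above, assuming CuPy and a GPU are available; the array contents are arbitrary:

```python
import cupy

import chainer
from chainer.backends import cuda

x = cupy.arange(6, dtype=cupy.float32).reshape(2, 3)
v = chainer.Variable(x)

dev = cuda.get_device_from_array(x)        # device holding ``x``
# ``Variable``s are not recognized; unwrap the underlying array first.
dev = cuda.get_device_from_array(v.array)
```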
22 changes: 16 additions & 6 deletions chainer/backends/intel64.py
@@ -87,28 +87,38 @@ def should_use_ideep(level):


def inputs_all_ready(inputs, supported_ndim=(2, 4)):
"""Checks if input arrays are supported for ideep optimization.
"""Checks if input arrays are supported for an iDeep primitive.
Before calling an iDeep primitive (e.g., ``ideep4py.linear.Forward``), you
need to make sure that all input arrays are ready for the primitive by
calling this function.
Information to be checked includes array types, dimensions and data types.
The function checks ``inputs`` info and ``supported_ndim``.
Inputs to be tested can be any of ``Variable``, ``numpy.ndarray`` or
``ideep4py.mdarray``. However, all inputs to iDeep primitives must be
``ideep4py.mdarray``. Callers of iDeep primitives are responsible for
converting all inputs to ``ideep4py.mdarray``.
Args:
inputs (sequence of arrays or variables``):
inputs (sequence of arrays or variables):
Inputs to be checked.
supported_ndim (tuple of ints):
Supported ndim values.
iDeep supports array dimension in either 2 or 4 only.
Supported ndim values for the iDeep primitive.
Returns:
bool: ``True`` if all conditions are met.
"""

def _is_supported_array_type(a):
return isinstance(a, ideep.mdarray) or ideep.check_type([a])

if _ideep_version is None:
return False

inputs = [x.data if isinstance(x, chainer.variable.Variable)
else x for x in inputs]

return (ideep.check_ndim(inputs, supported_ndim)
and (all([isinstance(a, ideep.mdarray) for a in inputs])
or ideep.check_type(inputs)))
and all([_is_supported_array_type(a) for a in inputs]))
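A hedged sketch of the calling pattern the docstring describes; the arrays and the primitive invocation are placeholders, not a specific iDeep API:

```python
import numpy

from chainer.backends import intel64

x = numpy.random.rand(8, 32).astype(numpy.float32)
W = numpy.random.rand(16, 32).astype(numpy.float32)

# Returns False unless iDeep is installed and every input has a
# supported type and ndim.
if intel64.inputs_all_ready((x, W), supported_ndim=(2,)):
    # The caller converts inputs to ideep4py.mdarray here before
    # invoking the iDeep primitive.
    pass
```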
3 changes: 1 addition & 2 deletions chainer/configuration.py
@@ -1,4 +1,3 @@
from __future__ import print_function
import contextlib
import sys
import threading
@@ -79,7 +78,7 @@ def _print_attrs(obj, keys, file):
max_len = max(len(key) for key in keys)
for key in keys:
spacer = ' ' * (max_len - len(key))
print(u'{} {}{}'.format(key, spacer, getattr(obj, key)), file=file)
file.write(u'{} {}{}\n'.format(key, spacer, getattr(obj, key)))


global_config = GlobalConfig()
8 changes: 1 addition & 7 deletions chainer/dataset/__init__.py
@@ -1,10 +1,4 @@
from chainer.dataset import convert # NOQA
from chainer.dataset import dataset_mixin # NOQA
from chainer.dataset import download # NOQA
from chainer.dataset import iterator # NOQA


# import class and function
# import classes and functions
from chainer.dataset.convert import concat_examples # NOQA
from chainer.dataset.convert import ConcatWithAsyncTransfer # NOQA
from chainer.dataset.convert import to_device # NOQA
21 changes: 7 additions & 14 deletions chainer/dataset/download.py
@@ -1,13 +1,13 @@
from __future__ import print_function
import hashlib
import os
import shutil
import sys
import tempfile

import filelock
from six.moves.urllib import request

from chainer import utils


_dataset_root = os.environ.get('CHAINER_DATASET_ROOT',
os.path.expanduser('~/.chainer/dataset'))
@@ -101,15 +101,12 @@ def cached_download(url):
if os.path.exists(cache_path):
return cache_path

temp_root = tempfile.mkdtemp(dir=cache_root)
try:
with utils.tempdir(dir=cache_root) as temp_root:
temp_path = os.path.join(temp_root, 'dl')
print('Downloading from {}...'.format(url), file=sys.stderr)
sys.stderr.write('Downloading from {}...\n'.format(url))
request.urlretrieve(url, temp_path)
with filelock.FileLock(lock_path):
shutil.move(temp_path, cache_path)
finally:
shutil.rmtree(temp_root)

return cache_path

@@ -141,10 +138,6 @@ def cache_or_load_file(path, creator, loader):
if os.path.exists(path):
return loader(path)

file_name = os.path.basename(path)
temp_dir = tempfile.mkdtemp()
temp_path = os.path.join(temp_dir, file_name)

try:
os.makedirs(_dataset_root)
except OSError:
@@ -153,12 +146,12 @@ def cache_or_load_file(path, creator, loader):

lock_path = os.path.join(_dataset_root, '_create_lock')

try:
with utils.tempdir() as temp_dir:
file_name = os.path.basename(path)
temp_path = os.path.join(temp_dir, file_name)
content = creator(temp_path)
with filelock.FileLock(lock_path):
if not os.path.exists(path):
shutil.move(temp_path, path)
finally:
shutil.rmtree(temp_dir)

return content
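Both hunks replace manual ``tempfile.mkdtemp()`` / ``shutil.rmtree()`` pairs with the ``utils.tempdir`` context manager used above, which guarantees cleanup even if an exception is raised. A minimal sketch of the pattern; the file name is arbitrary:

```python
import os

from chainer import utils

with utils.tempdir() as temp_dir:
    temp_path = os.path.join(temp_dir, 'work.dat')
    with open(temp_path, 'w') as f:
        f.write('intermediate data')
    # ... work with temp_path ...
# The temporary directory and its contents are removed on exit,
# even if the block raises.
```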
18 changes: 18 additions & 0 deletions chainer/dataset/iterator.py
@@ -61,6 +61,24 @@ def finalize(self):
"""
pass

def __enter__(self):
"""With statement context manager method
This method does nothing by default. Implementation may override it to
better handle the internal resources by with statement.
"""
return self

def __exit__(self, exc_type, exc_value, traceback):
"""With statement context manager method
This method does nothing by default. Implementation may override it to
better handle the internal resources by with statement.
"""
return None

def serialize(self, serializer):
"""Serializes the internal state of the iterator.
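With ``__enter__`` and ``__exit__`` defined on the base class, any iterator can now be used in a ``with`` statement. A sketch using ``SerialIterator`` on a toy dataset; implementations such as ``MultiprocessIterator`` may override these hooks to release resources:

```python
from chainer import iterators

dataset = list(range(100))  # any sequence works as a toy dataset

with iterators.SerialIterator(dataset, batch_size=10, repeat=False) as it:
    for batch in it:
        pass  # process the minibatch
# __exit__ runs here; overriding iterators can clean up at this point.
```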
14 changes: 1 addition & 13 deletions chainer/datasets/__init__.py
@@ -1,16 +1,4 @@
from chainer.datasets import cifar # NOQA
from chainer.datasets import dict_dataset # NOQA
from chainer.datasets import fashion_mnist # NOQA
from chainer.datasets import image_dataset # NOQA
from chainer.datasets import mnist # NOQA
from chainer.datasets import ptb # NOQA
from chainer.datasets import sub_dataset # NOQA
from chainer.datasets import svhn # NOQA
from chainer.datasets import transform_dataset # NOQA
from chainer.datasets import tuple_dataset # NOQA


# import class and function
# import classes and functions
from chainer.datasets.cifar import get_cifar10 # NOQA
from chainer.datasets.cifar import get_cifar100 # NOQA
from chainer.datasets.concatenated_dataset import ConcatenatedDataset # NOQA
Expand Down
9 changes: 7 additions & 2 deletions chainer/datasets/transform_dataset.py
@@ -14,14 +14,19 @@ class TransformDataset(dataset_mixin.DatasetMixin):
The function :obj:`transform` takes, as an argument, :obj:`in_data`, which
is the output of the base dataset's :meth:`__getitem__`, and returns
the transformed arrays as output. Please see the following example.
the transformed arrays as output. Please see the following example. Since
:obj:`in_data` directly refers to the item in the dataset, take care that
:obj:`transform` does not modify it. For example, note that the line
``img = img - 0.5`` below is correct since it makes a copy of ``img``.
However, it would be incorrect to use ``img -= 0.5`` since that would update
the contents of the item in the dataset in place, corrupting it.
>>> from chainer.datasets import get_mnist
>>> from chainer.datasets import TransformDataset
>>> dataset, _ = get_mnist()
>>> def transform(in_data):
... img, label = in_data
... img -= 0.5 # scale to [-0.5, 0.5]
... img = img - 0.5 # scale to [-0.5, 0.5]
... return img, label
>>> dataset = TransformDataset(dataset, transform)
1 change: 1 addition & 0 deletions chainer/exporters/__init__.py
@@ -0,0 +1 @@
from chainer.exporters import caffe # NOQA
