Skip to content

Commit

Permalink
Merge pull request #450 from sony/feature/20190527-variable-comparison
Browse files Browse the repository at this point in the history
[python] Changes definition of Variable comparison and hash
  • Loading branch information
TakuyaYashima committed May 31, 2019
2 parents ed7aea5 + ddcbf46 commit 6cabca6
Show file tree
Hide file tree
Showing 5 changed files with 159 additions and 62 deletions.
3 changes: 3 additions & 0 deletions doc/conf.py
Expand Up @@ -59,6 +59,9 @@

blockdiag_html_image_format = "SVG"

# Default role of markup `text`
default_role = 'any'

# At the bottom of conf.py
def setup(app):
app.add_config_value('recommonmark_config', {
Expand Down
24 changes: 20 additions & 4 deletions python/src/nnabla/_variable.pyx
Expand Up @@ -134,6 +134,12 @@ cdef class Variable:
is invoked immediately when :func:`nnabla.auto_forward`
or :func:`nnabla.set_auto_forward(True)` is used.
Note:
Relational operators :code:`==` and :code:`!=` of two :obj:`Variable` s are
defined as an address comparison of underlying C++ instances
(:code:`nbla::Variable`). Also, :func:`hash` function, which is often used
in a key for :obj:`set` and :obj:`dict`, is based on the address.
See also:
`Python API Tutorial
<http://nnabla.readthedocs.io/en/latest/python/tutorial/python_api.html>`_.
Expand Down Expand Up @@ -206,14 +212,18 @@ cdef class Variable:
self.shape, self.need_grad, hex(id(self)))

def __eq__(self, other):
    '''Equal operator compares the addresses of underlying C++ objects
    (``nbla::Variable``).

    Two Python :obj:`Variable` wrappers compare equal iff they hold the
    same ``nbla::Variable`` instance, so an unlinked view of a variable
    compares equal to the original regardless of ``need_grad``.
    '''
    # Compare the raw nbla::Variable pointers rather than the wrapper
    # (VariableHolder) pointers, so views of the same variable are equal.
    cdef CVariable* v = (<Variable> self).varp.variable().get()
    cdef CVariable* w = (<Variable?> other).varp.variable().get()
    return v == w

def __hash__(self):
    '''Returns a hash of the integer address of the held C++ object.

    Consistent with ``__eq__``: variables that compare equal (same
    underlying ``nbla::Variable``) hash to the same value, which makes
    :obj:`Variable` usable as a :obj:`set` member or :obj:`dict` key.
    '''
    # Hash the nbla::Variable address (not the wrapper address) so that
    # hash() agrees with the address-based __eq__ above.
    cdef CVariable* v = (<Variable> self).varp.variable().get()
    return hash(<intptr_t> v)

def apply(self, **kwargs):
'''Helper for setting property, then return self.
Expand Down Expand Up @@ -619,6 +629,12 @@ cdef class Variable:
Returns: nnabla._variable.Variable
Note:
The unlinked Variable behaves equivalent to the original variable
in a comparison operator and hash function regardless whether or
not the `need_grad` attribute is changed.
See a note in the `Variable` class documentation.
Example:
.. code-block:: python
Expand All @@ -640,7 +656,7 @@ cdef class Variable:
# None
"""
var = Variable.create_from_cvariable(self.varp.variable().get().view())
var = Variable.create_from_cvariable(self.varp.variable())
if need_grad is not None:
var.need_grad = need_grad
else:
Expand Down
145 changes: 94 additions & 51 deletions python/src/nnabla/parameter.py
Expand Up @@ -176,73 +176,116 @@ def set_parameter(key, param):
current_scope[names[0]] = param


def _create_parameter_by_initializer(initializer, shape, need_grad):

# If initializer is not set, just returns a new variable with zeros.
if initializer is None:
assert shape is not None
param = nn.Variable(shape, need_grad=need_grad)
param.data.zero() # Initialize with zero.
return param

# Initialize by a numpy array.
if isinstance(initializer, numpy.ndarray): # numpy init
assert (shape is None) or (tuple(shape) == initializer.shape)
return nn.Variable.from_numpy_array(
initializer, need_grad=need_grad)

# Initialize by Initializer or callable object which takes shape as an argument.
if callable(initializer):
assert shape is not None
return nn.Variable.from_numpy_array(
initializer(shape=shape), need_grad=need_grad)

# Invalid initialzier argument.
raise ValueError(
"`initializer` must be either the :obj:`numpy.ndarray`"
" or an instance inherited from `nnabla.initializer.BaseInitializer`.")


def get_parameter_or_create(name, shape=None, initializer=None, need_grad=True,
                            as_need_grad=None):
    """
    Returns an existing parameter variable in the current parameter scope
    with the provided name.

    If a variable with the provided name does not exist,
    a new variable is created and registered to the current parameter scope
    with the name, then returned.

    Args:
        name(str):
            The name under the current scope. If it already exists, the name
            is queried from the parameter manager.
        shape (:obj:`tuple` of :obj:`int`):
            Shape of the created parameter. The shape of the specified
            parameter must match with this shape. The default is None which is
            only valid if initializer is given as an :obj:`numpy.ndarray`.
        initializer (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`):
            An initialization function to be applied to the parameter.
            :obj:`numpy.ndarray` can also be given to initialize parameters
            from numpy array data.
        need_grad (bool):
            Register the parameter with the specified ``need_grad`` flag.
            The default is True. If the flag is different from the previously
            specified one, the flag will be overwritten, but the values will
            be kept.
        as_need_grad (bool):
            Get a parameter variable with the specified ``need_grad`` flag.
            Note that this doesn't overwrite the flag of the registered
            parameter variable with the provided name. Instead, if the given
            flag mismatches with the previously registered ``need_grad`` flag,
            it returns a new variable referring to the same array contents but
            with ``need_grad=as_need_grad``.

    Note:
        It returns a `Variable` which is unlinked from the registered one
        in the current parameter scope
        (using :py:meth:`nnabla.Variable.get_unlinked_variable`).
        That means changing a `need_grad` attribute doesn't affect
        the variable existing in the current parameter scope.
    """
    # Resolve delimiter '/' in the parameter name by recursing into
    # nested parameter scopes.
    names = name.split('/')
    if len(names) > 1:
        with parameter_scope(names[0]):
            return get_parameter_or_create(
                '/'.join(names[1:]), shape, initializer, need_grad,
                as_need_grad)

    # The returned view defaults to the registered need_grad flag.
    if as_need_grad is None:
        as_need_grad = need_grad

    # Try to find an existing parameter.
    param = get_parameter(names[0])

    # If found, verify shape and flags, then return an unlinked view.
    if param is not None:
        # NOTE(review): assumes `shape` is provided when the parameter
        # already exists; `tuple(None)` would raise TypeError otherwise.
        if param.shape != tuple(shape):
            raise ValueError(
                'The size of existing parameter "{}" {} is different from the '
                'size of new parameter {}.\n'
                'To clear all parameters, call nn.clear_parameters().'.format(
                    name, param.shape, tuple(shape)))
        if need_grad != param.need_grad:
            # Overwrite the registered flag (values are kept) and
            # re-register so the scope reflects the new flag.
            param.need_grad = need_grad
            set_parameter(name, param)
        return param.get_unlinked_variable(need_grad=as_need_grad)

    # TODO: Initializer info must be stored in Variable?
    # class VariableInfo:
    #     pass
    # info = VariableInfo()
    # info.initializer = initializer

    # Create a new parameter using the specified configuration,
    # and register it to the current scope.
    param = _create_parameter_by_initializer(initializer, shape, need_grad)
    set_parameter(name, param)
    return param.get_unlinked_variable(need_grad=as_need_grad)


def get_parameters(params=None, path='', grad_only=True):
Expand Down Expand Up @@ -285,10 +328,10 @@ def clear_parameters():
def set_parameter_from_proto(proto):
    """Register all parameters found in a protobuf message.

    For every parameter entry in ``proto``, a variable is created (or
    fetched) in the current parameter scope and filled with the stored
    values.

    Args:
        proto: A protobuf message with a ``parameter`` repeated field,
            each entry carrying ``variable_name``, ``shape.dim``, ``data``
            and ``need_grad``.
    """
    for parameter in proto.parameter:
        # Pass need_grad at creation time so the flag is registered with
        # the parameter instead of being patched on afterwards.
        var = get_parameter_or_create(
            parameter.variable_name, parameter.shape.dim,
            need_grad=parameter.need_grad)
        # Fill values from the flat proto data buffer.
        param = numpy.reshape(parameter.data, parameter.shape.dim)
        var.d = param


def load_parameters(path, proto=None, needs_proto=False):
Expand Down
15 changes: 8 additions & 7 deletions python/test/test_parameter.py
Expand Up @@ -41,23 +41,24 @@ def test_get_parameter_or_create_need_grad():
import nnabla as nn
from nnabla.parameter import get_parameter_or_create
nn.clear_parameters()
param1 = get_parameter_or_create('p/param1', (2, 3, 4, 5), need_grad=True)
key1 = 'p/param1'
param1 = get_parameter_or_create(key1, (2, 3, 4, 5), need_grad=True)
p1d = np.random.randn(*param1.shape).astype(np.float32)
p1g = np.random.randn(*param1.shape).astype(np.float32)
param1.d = p1d
param1.g = p1g
param1_f = get_parameter_or_create(
'p/param1', param1.shape, need_grad=False)
assert not param1_f.need_grad
assert not param1.need_grad
key1, param1.shape, need_grad=False)
assert not nn.get_parameters(grad_only=False)[key1].need_grad
param1_f = get_parameter_or_create(
key1, param1.shape, need_grad=True)
assert nn.get_parameters()[key1].need_grad
assert np.all(param1.d == p1d)
assert np.all(param1.d == param1_f.d)
param1.d = 1
assert np.all(param1_f.d == 1)
param1_f2 = get_parameter_or_create(
'p/param1', param1.shape, need_grad=True, as_need_grad=False)
assert param1.need_grad
assert param1_f.need_grad
key1, param1.shape, need_grad=True, as_need_grad=False)
assert not param1_f2.need_grad
nn.clear_parameters()

Expand Down
34 changes: 34 additions & 0 deletions python/test/test_variable.py
Expand Up @@ -255,3 +255,37 @@ def test_function_references():
del h2

assert len(v.function_references) == 0


@pytest.mark.parametrize("f", [lambda x: x, hash])
def test_variable_equality_and_hash(f):
    """Equality and hash must track the underlying C++ variable."""
    dims = (2, 3, 4)
    a = nn.Variable(dims)
    # A variable always equals (and hashes like) itself.
    assert f(a) == f(a)

    # A freshly created variable of the same shape is a distinct object.
    b = nn.Variable(dims)
    assert f(a) != f(b)

    # An unlinked view shares the underlying variable, hence compares equal.
    b = a.get_unlinked_variable()
    assert f(a) == f(b)

    # Flipping need_grad must not break the equality/hash relation.
    b.need_grad = True
    assert f(a) == f(b)


def test_variable_set():
    """Exercise __hash__ and __eq__ through set membership."""
    dims = (2, 3, 4)
    v = nn.Variable(dims)
    members = set()
    members.add(v)
    assert v in members

    # A distinct variable of the same shape is not a member.
    w = nn.Variable(dims)
    assert w not in members

    # An unlinked view hashes/compares like the original, so it is found.
    w = v.get_unlinked_variable()
    assert w in members

    # Changing need_grad on the view does not change its identity.
    w.need_grad = True
    assert w in members

0 comments on commit 6cabca6

Please sign in to comment.