Replace __call__ with forward

mitmul committed Jun 13, 2018
1 parent 65b4d24 commit 4a5706e
Showing 93 changed files with 214 additions and 220 deletions.
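
The change applied across these 93 files is mechanical: each link's __call__ method is renamed to forward, while link instances are still invoked with call syntax because the base Link.__call__ delegates to forward. A minimal sketch of the new convention (the class name LinearLayer and all sizes are illustrative, adapted from the docstring example in chainer/link.py below, not taken verbatim from the diff):

    import numpy as np
    import chainer
    import chainer.functions as F
    from chainer import initializers

    class LinearLayer(chainer.Link):

        def __init__(self, n_in, n_out):
            super(LinearLayer, self).__init__()
            with self.init_scope():
                # Parameters registered inside init_scope() are tracked by the link.
                self.W = chainer.Parameter(initializers.Normal(), (n_out, n_in))
                self.b = chainer.Parameter(initializers.Zero(), (n_out,))

        def forward(self, x):  # previously named __call__
            return F.linear(x, self.W, self.b)

    layer = LinearLayer(3, 2)
    x = np.zeros((4, 3), dtype=np.float32)
    y = layer(x)  # call syntax still works; Link.__call__ dispatches to forward()
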
20 changes: 7 additions & 13 deletions chainer/link.py
@@ -108,11 +108,11 @@ def __init__(self, n_in, n_out):
self.b = chainer.Parameter(
initializers.Zero(), (n_out,))
def __call__(self, x):
def forward(self, x):
return F.linear(x, self.W, self.b)
This example shows that a user can define arbitrary parameters and use
them in any methods. Links typically implement the ``__call__``
them in any methods. Links typically implement the ``forward``
operator, although they can also provide other methods to implement the
forward propagation.
@@ -194,14 +194,8 @@ def __init__(self):
finally:
self._within_init_scope = old_flag

def __call__(self, *args, **kwargs):
    try:
        forward = self.forward
    except AttributeError:
        raise TypeError(
            '{} object has neither \'Link.__call__\' method overridden'
            ' nor \'forward\' method defined.'.format(self))
    return forward(*args, **kwargs)
def __call__(self, *args, **kwargs):
    return self.forward(*args, **kwargs)

def __setattr__(self, name, value):
if self.within_init_scope and isinstance(value, variable.Parameter):
@@ -643,7 +637,7 @@ def __init__(self):
None, 64, 3, 1, 1, nobias=True)
self.bn = L.BatchNormalization(64)
def __call__(self, x):
def forward(self, x):
return F.relu(self.bn(self.conv(x)))
net = ConvBNReLU().repeat(16, mode='init')
@@ -772,15 +766,15 @@ def __init__(self, n_in, n_hidden, n_out):
self.layer2 = L.Linear(n_hidden, n_hidden)
self.layer3 = L.Linear(n_hidden, n_out)
def __call__(self, x):
def forward(self, x):
# Forward propagation
h1 = F.relu(self.layer1(x))
h2 = F.relu(self.layer2(h1))
return self.layer3(h2)
Child links are registered via the assignment within a
``with self.init_scope():`` block. The forward propagation is often
implemented as the ``__call__`` operator as the above example, though
implemented as the ``forward`` operator as the above example, though
it is not mandatory.
Args:
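
The multi-layer example in the Chain docstring above can be exercised as follows; this is a runnable sketch with an assumed class name (MultiLayerPerceptron) and assumed sizes, showing child links registered inside init_scope() and the computation defined in forward():

    import numpy as np
    import chainer
    import chainer.functions as F
    import chainer.links as L

    class MultiLayerPerceptron(chainer.Chain):

        def __init__(self, n_in, n_hidden, n_out):
            super(MultiLayerPerceptron, self).__init__()
            with self.init_scope():
                # Child links registered here become part of the chain.
                self.layer1 = L.Linear(n_in, n_hidden)
                self.layer2 = L.Linear(n_hidden, n_hidden)
                self.layer3 = L.Linear(n_hidden, n_out)

        def forward(self, x):
            h1 = F.relu(self.layer1(x))
            h2 = F.relu(self.layer2(h1))
            return self.layer3(h2)

    model = MultiLayerPerceptron(784, 100, 10)
    y = model(np.zeros((1, 784), dtype=np.float32))  # dispatches to forward()
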
2 changes: 1 addition & 1 deletion chainer/links/activation/maxout.py
@@ -80,7 +80,7 @@ def __init__(self, in_size, out_size, pool_size,
self.out_size = out_size
self.pool_size = pool_size

def __call__(self, x):
def forward(self, x):
"""Applies the maxout layer.
Args:
2 changes: 1 addition & 1 deletion chainer/links/activation/prelu.py
@@ -27,7 +27,7 @@ def __init__(self, shape=(), init=0.25):
with self.init_scope():
self.W = variable.Parameter(init, shape)

def __call__(self, x):
def forward(self, x):
"""Applies the parametric ReLU activation function.
Args:
2 changes: 1 addition & 1 deletion chainer/links/activation/simplified_dropconnect.py
@@ -71,7 +71,7 @@ def __init__(self, in_size, out_size, ratio=.5, nobias=False,
def _initialize_params(self, in_size):
self.W.initialize((self.out_size, in_size))

def __call__(self, x, train=True, mask=None, use_batchwise_mask=True):
def forward(self, x, train=True, mask=None, use_batchwise_mask=True):
"""Applies the simplified dropconnect layer.
Args:
6 changes: 3 additions & 3 deletions chainer/links/activation/swish.py
@@ -35,7 +35,7 @@ def __init__(self, n_units, n_out):
self.l2 = L.Linear(None, n_units)
self.l3 = L.Linear(None, n_out)
def __call__(self, x):
def forward(self, x):
h1 = F.relu(self.l1(x))
h2 = F.relu(self.l2(h1))
return self.l3(h2)
@@ -53,7 +53,7 @@ def __init__(self, n_units, n_out):
self.s2 = L.Swish(None)
self.l3 = L.Linear(None, n_out)
def __call__(self, x):
def forward(self, x):
h1 = self.s1(self.l1(x))
h2 = self.s2(self.l2(h1))
return self.l3(h2)
@@ -77,7 +77,7 @@ def __init__(self, beta_shape, beta_init=1.0):
beta_init = initializers.Constant(beta_init)
self.beta = variable.Parameter(beta_init)

def __call__(self, x):
def forward(self, x):
"""Applies the Swish activation function.
Args:
4 changes: 2 additions & 2 deletions chainer/links/caffe/caffe_function.py
@@ -173,8 +173,8 @@ def __init__(self, model_path):
'Skip the layer "%s", since CaffeFunction does not'
'support it' % layer.name)

def __call__(self, inputs, outputs, disable=(), **kwargs):
"""__call__(self, inputs, outputs, disable=())
def forward(self, inputs, outputs, disable=(), **kwargs):
"""forward(self, inputs, outputs, disable=())
Executes a sub-network of the network.
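
A usage sketch for the renamed CaffeFunction.forward; the model file and the output layer name below are placeholders, and running it requires a real pretrained .caffemodel:

    import numpy as np
    from chainer.links.caffe import CaffeFunction

    # Hypothetical model file; any pretrained Caffe binary proto would do.
    func = CaffeFunction('bvlc_googlenet.caffemodel')
    x = np.zeros((1, 3, 224, 224), dtype=np.float32)
    # Call syntax still dispatches to forward(inputs, outputs, disable=()).
    y, = func(inputs={'data': x}, outputs=['loss3/classifier'])
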
4 changes: 2 additions & 2 deletions chainer/links/connection/bias.py
@@ -17,7 +17,7 @@ class Bias(link.Link):
input is applied.
shape (tuple of ints): Shape of the learnable bias parameter. If
``None``, this link does not have learnable parameters so an
explicit bias needs to be given to its ``__call__`` method's second
explicit bias needs to be given to its ``forward`` method's second
input.
.. seealso:: See :func:`~chainer.functions.bias` for details.
@@ -38,7 +38,7 @@ def __init__(self, axis=1, shape=None):

self.axis = axis

def __call__(self, *xs):
def forward(self, *xs):
"""Applies broadcasted elementwise summation.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/bilinear.py
@@ -89,7 +89,7 @@ def __init__(self, left_size, right_size, out_size, nobias=False,
self.V2 = variable.Parameter(initialV2, V2_shape)
self.b = variable.Parameter(initialb, b_shape)

def __call__(self, e1, e2):
def forward(self, e1, e2):
"""Applies the bilinear function to inputs and the internal parameters.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/convolution_2d.py
@@ -156,7 +156,7 @@ def _initialize_params(self, in_channels):
W_shape = (self.out_channels, int(in_channels / self.groups), kh, kw)
self.W.initialize(W_shape)

def __call__(self, x):
def forward(self, x):
"""Applies the convolution layer.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/convolution_nd.py
@@ -82,7 +82,7 @@ def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
initial_bias = initializers._get_initializer(initial_bias)
self.b = variable.Parameter(initial_bias, out_channels)

def __call__(self, x):
def forward(self, x):
"""Applies N-dimensional convolution layer.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/deconvolution_2d.py
@@ -175,7 +175,7 @@ def _initialize_params(self, in_channels):
W_shape = (in_channels, int(self.out_channels / self.groups), kh, kw)
self.W.initialize(W_shape)

def __call__(self, x):
def forward(self, x):
if self.W.data is None:
self._initialize_params(x.shape[1])
return deconvolution_2d.deconvolution_2d(
2 changes: 1 addition & 1 deletion chainer/links/connection/deconvolution_nd.py
@@ -79,7 +79,7 @@ def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
initial_bias = initializers._get_initializer(initial_bias)
self.b = variable.Parameter(initial_bias, out_channels)

def __call__(self, x):
def forward(self, x):
return deconvolution_nd.deconvolution_nd(
x, self.W, b=self.b, stride=self.stride, pad=self.pad,
outsize=self.outsize, dilate=self.dilate, groups=self.groups)
4 changes: 2 additions & 2 deletions chainer/links/connection/deformable_convolution_2d.py
@@ -69,7 +69,7 @@ def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
in_channels, out_channels, ksize, stride, pad,
deform_nobias, deform_initialW, deform_initial_bias)

def __call__(self, x):
def forward(self, x):
"""Applies the deformable convolution.
Args:
@@ -121,7 +121,7 @@ def _initialize_params(self, in_channels):
if self.b is not None:
self.b.initialize(self.out_channels)

def __call__(self, x, offset):
def forward(self, x, offset):
if self.W.data is None:
self._initialize_params(x.shape[1])
return deformable_convolution_2d_sampler(
2 changes: 1 addition & 1 deletion chainer/links/connection/depthwise_convolution_2d.py
@@ -76,7 +76,7 @@ def _initialize_params(self, in_channels):
if self.b is not None:
self.b.initialize(self.channel_multiplier * in_channels)

def __call__(self, x):
def forward(self, x):
"""Applies the depthwise convolution layer.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/dilated_convolution_2d.py
@@ -118,7 +118,7 @@ def _initialize_params(self, in_channels):
W_shape = (self.out_channels, in_channels, kh, kw)
self.W.initialize(W_shape)

def __call__(self, x):
def forward(self, x):
"""Applies the convolution layer.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/embed_id.py
@@ -57,7 +57,7 @@ def __init__(self, in_size, out_size, initialW=None, ignore_label=None):
initialW = normal.Normal(1.0)
self.W = variable.Parameter(initialW, (in_size, out_size))

def __call__(self, x):
def forward(self, x):
"""Extracts the word embedding of given IDs.
Args:
12 changes: 6 additions & 6 deletions chainer/links/connection/gru.py
@@ -105,7 +105,7 @@ class StatelessGRU(GRUBase):
"""

def __call__(self, h, x):
def forward(self, h, x):
r = sigmoid.sigmoid(self.W_r(x) + self.U_r(h))
z = sigmoid.sigmoid(self.W_z(x) + self.U_z(h))
h_bar = tanh.tanh(self.W(x) + self.U(r * h))
@@ -223,7 +223,7 @@ def set_state(self, h):
def reset_state(self):
self.h = None

def __call__(self, x):
def forward(self, x):
z = self.W_z(x)
h_bar = self.W(x)
if self.h is not None:
@@ -261,15 +261,15 @@ class GRU(StatefulGRU):
"""

def __call__(self, *args):
"""__call__(self, x)
def forward(self, *args):
"""forward(self, x)
Does forward propagation.
"""

n_args = len(args)
msg = ("Invalid argument. The length of GRU.__call__ must be 1. "
msg = ("Invalid argument. The length of GRU.forward must be 1. "
"But %d is given. " % n_args)

if n_args == 0 or n_args >= 3:
@@ -281,4 +281,4 @@ def __call__(self, *args):
"Use chainer.links.StatelessGRU instead.")
raise ValueError(msg)

return super(GRU, self).__call__(args[0])
return super(GRU, self).forward(args[0])
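
The check above enforces that the stateful GRU's forward takes exactly one argument, while StatelessGRU.forward takes the previous hidden state explicitly. A small sketch, assuming the (in_size, out_size) constructor signatures and illustrative shapes:

    import numpy as np
    import chainer.links as L

    x = np.zeros((1, 10), dtype=np.float32)

    gru = L.GRU(10, 20)            # stateful: hidden state kept inside the link
    h1 = gru(x)                    # forward(x) -- exactly one argument
    h2 = gru(x)                    # reuses the stored state
    gru.reset_state()

    stateless = L.StatelessGRU(10, 20)
    h = np.zeros((1, 20), dtype=np.float32)
    h_new = stateless(h, x)        # forward(h, x) -- state passed explicitly
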
2 changes: 1 addition & 1 deletion chainer/links/connection/highway.py
@@ -60,7 +60,7 @@ def __init__(self, in_out_size, nobias=False, activate=relu.relu,
in_out_size, in_out_size, nobias=nobias,
initialW=init_Wt, initial_bias=init_bt)

def __call__(self, x):
def forward(self, x):
"""Computes the output of the Highway module.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/inception.py
@@ -61,7 +61,7 @@ def __init__(self, in_channels, out1, proj3, out3, proj5, out5, proj_pool,
in_channels, proj_pool, 1, initialW=conv_init,
initial_bias=bias_init)

def __call__(self, x):
def forward(self, x):
"""Computes the output of the Inception module.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/inceptionbn.py
@@ -92,7 +92,7 @@ def __init__(self, in_channels, out1, proj3, out3, proj33, out33,
self.poolpn = batch_normalization.BatchNormalization(
proj_pool, dtype=dtype)

def __call__(self, x):
def forward(self, x):
outs = []

if self.out1 > 0:
2 changes: 1 addition & 1 deletion chainer/links/connection/linear.py
@@ -113,7 +113,7 @@ def __init__(self, in_size, out_size=None, nobias=False,
def _initialize_params(self, in_size):
self.W.initialize((self.out_size, in_size))

def __call__(self, x):
def forward(self, x):
"""Applies the linear layer.
Args:
2 changes: 1 addition & 1 deletion chainer/links/connection/local_convolution_2d.py
@@ -87,7 +87,7 @@ def _initialize_params(self, in_channels, in_size):
if not self.nobias:
self.b.initialize(bias_shape)

def __call__(self, x):
def forward(self, x):
"""Applies the local convolution layer.
Args:
4 changes: 2 additions & 2 deletions chainer/links/connection/lstm.py
@@ -117,7 +117,7 @@ class StatelessLSTM(LSTMBase):
"""

def __call__(self, c, h, x):
def forward(self, c, h, x):
"""Returns new cell state and updated output of LSTM.
Args:
@@ -289,7 +289,7 @@ def reset_state(self):
"""
self.c = self.h = None

def __call__(self, x):
def forward(self, x):
"""Updates the internal state and returns the LSTM outputs.
Args:
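
A sketch contrasting the two LSTM variants after the rename, with illustrative sizes: the stateful LSTM's forward(x) keeps c and h inside the link, while StatelessLSTM.forward(c, h, x) returns the updated pair (None states are treated as zeros):

    import numpy as np
    import chainer.links as L

    xs = np.zeros((5, 1, 10), dtype=np.float32)   # toy sequence: 5 steps, batch 1

    lstm = L.LSTM(10, 20)
    lstm.reset_state()
    for x in xs:
        h = lstm(x)                       # forward(x); updates the internal c and h

    stateless = L.StatelessLSTM(10, 20)
    c, h = stateless(None, None, xs[0])   # forward(c, h, x); returns both new states
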
4 changes: 2 additions & 2 deletions chainer/links/connection/mgu.py
@@ -26,7 +26,7 @@ def _call_mgu(self, h, x):

class StatelessMGU(MGUBase):

__call__ = MGUBase._call_mgu
forward = MGUBase._call_mgu


class StatefulMGU(MGUBase):
@@ -58,7 +58,7 @@ def set_state(self, h):
def reset_state(self):
self.h = None

def __call__(self, x):
def forward(self, x):
if self.h is None:
n_batch = x.shape[0]
h_data = self.xp.zeros(
2 changes: 1 addition & 1 deletion chainer/links/connection/mlp_convolution_2d.py
@@ -84,7 +84,7 @@ def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
super(MLPConvolution2D, self).__init__(*convs)
self.activation = activation

def __call__(self, x):
def forward(self, x):
"""Computes the output of the mlpconv layer.
Args:
4 changes: 2 additions & 2 deletions chainer/links/connection/n_step_lstm.py
@@ -31,8 +31,8 @@ class NStepLSTMBase(n_step_rnn.NStepRNNBase):

n_weights = 8

def __call__(self, hx, cx, xs, **kwargs):
"""__call__(self, hx, cx, xs)
def forward(self, hx, cx, xs, **kwargs):
"""forward(self, hx, cx, xs)
Calculate all hidden states and cell states.
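
A usage sketch for the renamed NStepLSTM.forward(hx, cx, xs): xs is a list of per-sequence arrays that may differ in length, and None initial states mean zero vectors; the layer count and sizes are illustrative:

    import numpy as np
    import chainer.links as L

    rnn = L.NStepLSTM(n_layers=2, in_size=10, out_size=20, dropout=0.0)
    xs = [np.zeros((7, 10), dtype=np.float32),    # sequence of length 7
          np.zeros((3, 10), dtype=np.float32)]    # sequence of length 3
    hy, cy, ys = rnn(None, None, xs)              # dispatches to forward(hx, cx, xs)
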
4 changes: 2 additions & 2 deletions chainer/links/connection/n_step_rnn.py
@@ -126,8 +126,8 @@ def n_cells(self):
"""
return NotImplementedError

def __call__(self, hx, xs, **kwargs):
"""__call__(self, hx, xs)
def forward(self, hx, xs, **kwargs):
"""forward(self, hx, xs)
Calculate all hidden states and cell states.
2 changes: 1 addition & 1 deletion chainer/links/connection/parameter.py
@@ -26,7 +26,7 @@ def __init__(self, array):
if isinstance(array, cuda.ndarray):
self.to_gpu(cuda.get_device_from_array(array))

def __call__(self, volatile='off'):
def forward(self, volatile='off'):
"""Returns the parameter variable.
Args:
