/
convolution_2d.py
162 lines (123 loc) · 5.75 KB
/
convolution_2d.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
from chainer.functions.connection import convolution_2d
from chainer import initializers
from chainer import link
from chainer.utils import argument
from chainer import variable
class Convolution2D(link.Link):

    """__init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0, nobias=False, initialW=None, initial_bias=None)

    Two-dimensional convolutional layer.

    This link wraps the :func:`~chainer.functions.convolution_2d` function and
    holds the filter weight and bias vector as parameters.

    The output of this function can be non-deterministic when it uses cuDNN.
    If ``chainer.configuration.config.deterministic`` is ``True`` and
    cuDNN version is >= v3, it forces cuDNN to use a deterministic algorithm.

    Convolution links can use a feature of cuDNN called autotuning, which
    selects the most efficient CNN algorithm for images of fixed-size,
    can provide a significant performance boost for fixed neural nets.
    To enable, set ``chainer.using_config('autotune', True)``.

    .. warning::

        ``deterministic`` argument is not supported anymore since v2.
        Instead, use ``chainer.using_config('cudnn_deterministic', value)``
        (value is either ``True`` or ``False``).
        See :func:`chainer.using_config`.

    Args:
        in_channels (int or None): Number of channels of input arrays.
            If ``None``, parameter initialization will be deferred until the
            first forward data pass at which time the size will be determined.
        out_channels (int): Number of channels of output arrays.
        ksize (int or pair of ints): Size of filters (a.k.a. kernels).
            ``ksize=k`` and ``ksize=(k, k)`` are equivalent.
        stride (int or pair of ints): Stride of filter applications.
            ``stride=s`` and ``stride=(s, s)`` are equivalent.
        pad (int or pair of ints): Spatial padding width for input arrays.
            ``pad=p`` and ``pad=(p, p)`` are equivalent.
        nobias (bool): If ``True``, then this link does not use the bias term.
        initialW (:ref:`initializer <initializer>`): Initializer to
            initialize the weight. When it is :class:`numpy.ndarray`,
            its ``ndim`` should be 4.
        initial_bias (:ref:`initializer <initializer>`): Initializer to
            initialize the bias. If ``None``, the bias will be initialized to
            zero. When it is :class:`numpy.ndarray`, its ``ndim`` should be 1.

    .. seealso::
       See :func:`chainer.functions.convolution_2d` for the definition of
       two-dimensional convolution.

    Attributes:
        W (~chainer.Variable): Weight parameter.
        b (~chainer.Variable): Bias parameter.

    .. admonition:: Example

        There are several ways to make a Convolution2D link.

        Let an input vector ``x`` be:

        >>> x = np.arange(1 * 3 * 10 * 10, dtype='f').reshape(1, 3, 10, 10)

        1. Give the first three arguments explicitly:

            >>> l = L.Convolution2D(3, 7, 5)
            >>> y = l(x)
            >>> y.shape
            (1, 7, 6, 6)

        2. Omit ``in_channels`` or fill it with ``None``:

            The below two cases are the same.

            >>> l = L.Convolution2D(7, 5)
            >>> y = l(x)
            >>> y.shape
            (1, 7, 6, 6)

            >>> l = L.Convolution2D(None, 7, 5)
            >>> y = l(x)
            >>> y.shape
            (1, 7, 6, 6)

            When you omit the first argument, you need to specify the other
            subsequent arguments from ``stride`` as keyword arguments. So the
            below two cases are the same.

            >>> l = L.Convolution2D(7, 5, stride=1, pad=0)
            >>> y = l(x)
            >>> y.shape
            (1, 7, 6, 6)

            >>> l = L.Convolution2D(None, 7, 5, 1, 0)
            >>> y = l(x)
            >>> y.shape
            (1, 7, 6, 6)

    """  # NOQA

    def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None, **kwargs):
        super(Convolution2D, self).__init__()

        # ``deterministic`` was removed in v2; raise a helpful error pointing
        # at the configuration-based replacement rather than silently
        # accepting an unknown keyword.
        argument.check_unexpected_kwargs(
            kwargs, deterministic="deterministic argument is not "
            "supported anymore. "
            "Use chainer.using_config('cudnn_deterministic', value) "
            "context where value is either `True` or `False`.")
        argument.assert_kwargs_empty(kwargs)

        if ksize is None:
            # Two-positional-argument form ``Convolution2D(out_channels,
            # ksize)``: shift the arguments over and defer ``in_channels``
            # until the first forward pass.
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.out_channels = out_channels

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            # When ``in_channels`` is unknown, W stays uninitialized
            # (shape-less) until ``_initialize_params`` runs in ``__call__``.
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels)

    def _initialize_params(self, in_channels):
        # Allocate the weight now that the number of input channels is known.
        kh, kw = _pair(self.ksize)
        W_shape = (self.out_channels, in_channels, kh, kw)
        self.W.initialize(W_shape)

    def __call__(self, x):
        """Applies the convolution layer.

        Args:
            x (~chainer.Variable): Input image.

        Returns:
            ~chainer.Variable: Output of the convolution.

        """
        if self.W.data is None:
            # ``in_channels`` was omitted at construction time; infer it from
            # the channel axis (axis 1, NCHW layout) of the first input.
            self._initialize_params(x.shape[1])
        return convolution_2d.convolution_2d(
            x, self.W, self.b, self.stride, self.pad)
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return x, x