-
Notifications
You must be signed in to change notification settings - Fork 1.4k
/
function_link.py
1210 lines (940 loc) · 44.5 KB
/
function_link.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import contextlib
import six
import typing as tp # NOQA
import unittest
import numpy
import chainer
from chainer import backend
from chainer import initializers
from chainer.testing import array as array_module
from chainer import utils
class _TestError(AssertionError):
    """Base class of Chainer test errors.

    Provides small helpers to raise the concrete error subclass with a
    given message, optionally chaining from an underlying exception.
    """

    @classmethod
    def check(cls, expr, message):
        """Raise this error with ``message`` unless ``expr`` is true."""
        if expr:
            return
        raise cls(message)

    @classmethod
    def fail(cls, message, exc=None):
        """Unconditionally raise this error, chaining from ``exc`` if given."""
        if exc is not None:
            # utils._raise_from raises itself; the statement below is then
            # never reached.
            utils._raise_from(cls, message, exc)
        raise cls(message)

    @classmethod
    @contextlib.contextmanager
    def raise_if_fail(cls, message, error_types=AssertionError):
        """Context manager that translates ``error_types`` into this error."""
        try:
            yield
        except error_types as err:
            cls.fail(message, err)
class FunctionTestError(_TestError):
    """Error raised when the target function is implemented incorrectly."""
class LinkTestError(_TestError):
    """Error raised when the target link is implemented incorrectly."""
class InitializerArgument(object):
    """Class to hold a pair of initializer argument value and actual
    initializer-like.

    This class is meant to be included in the return value from
    :meth:`chainer.testing.LinkTestCase.get_initializers` in
    :class:`chainer.testing.LinkTestCase` if the argument and the actual
    initializer in the link do not directly correspond.

    In that case, the first element should correspond to the argument passed to
    the constructor of the link, and the second element correspond to the
    actual initializer-like object used by the link.

    Args:
        argument_value: Value passed to the link constructor as the
            initializer argument. May be any value the constructor accepts
            (including ``None``).
        expected_initializer: Initializer-like object the link is expected
            to actually use. Must not be ``None``.

    Raises:
        ValueError: If ``expected_initializer`` is ``None``.
    """

    def __init__(self, argument_value, expected_initializer):
        if expected_initializer is None:
            # Bug fix: the message previously read 'Expected initialized'.
            raise ValueError('Expected initializer cannot be None.')
        initializers._check_is_initializer_like(expected_initializer)
        self.argument_value = argument_value
        self.expected_initializer = expected_initializer
class FunctionTestBase(object):
    """Base implementation of forward/backward/double-backward checks for a
    target function.

    Concrete test cases override ``forward``, ``forward_expected`` and
    ``generate_inputs``; the ``run_test_*`` methods drive the numerical
    checks for a given backend configuration.
    """

    # Current backend configuration; set by the run_test_* entry points.
    backend_config = None
    # Option dicts forwarded to the corresponding check routines.  Kept None
    # at class level and replaced by fresh dicts in __init__ to avoid sharing
    # mutable state between instances.
    check_forward_options = None
    check_backward_options = None
    check_double_backward_options = None
    # Flags to skip the individual tests.
    skip_forward_test = False
    skip_backward_test = False
    skip_double_backward_test = False
    # If True, data is resampled whenever a non-differentiable point is hit.
    dodge_nondifferentiable = False
    # None: make incoming arrays non-contiguous; 'C': keep them C-contiguous.
    contiguous = None

    def __init__(self, *args, **kwargs):
        super(FunctionTestBase, self).__init__(*args, **kwargs)
        self.check_forward_options = {}
        self.check_backward_options = {}
        self.check_double_backward_options = {}

    def before_test(self, test_name):
        """Callback invoked before each test; override to implement skips."""
        pass

    def forward(self, inputs, device):
        """Computes the target forward function. Must be overridden."""
        raise NotImplementedError('forward() is not implemented.')

    def forward_expected(self, inputs):
        """Computes the expected forward results. Must be overridden."""
        raise NotImplementedError('forward_expected() is not implemented.')

    def generate_inputs(self):
        """Returns a tuple of input numpy arrays. Must be overridden."""
        raise NotImplementedError('generate_inputs() is not implemented.')

    def generate_grad_outputs(self, outputs_template):
        """Returns random output gradients matching the template
        shapes/dtypes."""
        grad_outputs = tuple([
            numpy.random.uniform(-1, 1, a.shape).astype(a.dtype)
            for a in outputs_template])
        return grad_outputs

    def generate_grad_grad_inputs(self, inputs_template):
        """Returns random second-order input gradients matching the
        template shapes/dtypes."""
        grad_grad_inputs = tuple([
            numpy.random.uniform(-1, 1, a.shape).astype(a.dtype)
            for a in inputs_template])
        return grad_grad_inputs

    def check_forward_outputs(self, outputs, expected_outputs):
        """Compares actual and expected forward outputs.

        Raises ``FunctionTestError`` on mismatch.
        """
        assert isinstance(outputs, tuple)
        assert isinstance(expected_outputs, tuple)
        assert all(isinstance(a, chainer.get_array_types()) for a in outputs)
        assert all(
            isinstance(a, chainer.get_array_types()) for a in expected_outputs)
        _check_arrays_equal(
            outputs, expected_outputs, FunctionTestError,
            **self.check_forward_options)

    def _to_noncontiguous_as_needed(self, contig_arrays):
        # Applies the `contiguous` policy to the given (C-contiguous) arrays.
        if self.contiguous is None:
            # non-contiguous
            return array_module._as_noncontiguous_array(contig_arrays)
        if self.contiguous == 'C':
            # C-contiguous
            return contig_arrays
        assert False, (
            'Invalid value of `contiguous`: {}'.format(self.contiguous))

    def _generate_inputs(self):
        # Wraps generate_inputs with type checking (must be CPU arrays).
        inputs = self.generate_inputs()
        _check_array_types(inputs, backend.CpuDevice(), 'generate_inputs')
        return inputs

    def _generate_grad_outputs(self, outputs_template):
        # Wraps generate_grad_outputs with type checking.
        grad_outputs = self.generate_grad_outputs(outputs_template)
        _check_array_types(
            grad_outputs, backend.CpuDevice(), 'generate_grad_outputs')
        return grad_outputs

    def _generate_grad_grad_inputs(self, inputs_template):
        # Wraps generate_grad_grad_inputs with type checking.
        grad_grad_inputs = self.generate_grad_grad_inputs(inputs_template)
        _check_array_types(
            grad_grad_inputs, backend.CpuDevice(), 'generate_grad_grad_inputs')
        return grad_grad_inputs

    def _forward_expected(self, inputs):
        # Wraps forward_expected with type checking of the outputs.
        outputs = self.forward_expected(inputs)
        _check_array_types(
            outputs, backend.CpuDevice(), 'forward_expected')
        return outputs

    def _forward(self, inputs, backend_config):
        # Runs the user-defined forward under the backend configuration and
        # checks that the outputs are variables on the expected device.
        assert all(isinstance(a, chainer.Variable) for a in inputs)
        with backend_config:
            outputs = self.forward(inputs, backend_config.device)
        _check_variable_types(
            outputs, backend_config.device, 'forward', FunctionTestError)
        return outputs

    def run_test_forward(self, backend_config):
        # Runs the forward test.
        if self.skip_forward_test:
            raise unittest.SkipTest('skip_forward_test is set')

        self.backend_config = backend_config
        self.test_name = 'test_forward'
        self.before_test(self.test_name)

        cpu_inputs = self._generate_inputs()
        cpu_inputs = self._to_noncontiguous_as_needed(cpu_inputs)
        # Keep copies to later verify the inputs were not modified in-place.
        inputs_copied = [a.copy() for a in cpu_inputs]

        # Compute expected outputs
        cpu_expected = self._forward_expected(cpu_inputs)

        # Compute actual outputs
        inputs = backend_config.get_array(cpu_inputs)
        inputs = self._to_noncontiguous_as_needed(inputs)
        outputs = self._forward(
            tuple([
                chainer.Variable(a, requires_grad=a.dtype.kind == 'f')
                for a in inputs]),
            backend_config)

        # Check inputs has not changed
        indices = []
        for i in range(len(inputs)):
            try:
                array_module.assert_allclose(
                    inputs_copied[i], inputs[i], atol=0, rtol=0)
            except AssertionError:
                indices.append(i)

        if len(indices) > 0:
            FunctionTestError.fail(
                'Input arrays have been modified during forward.\n'
                'Indices of modified inputs: {}\n'
                'Input array shapes and dtypes: {}\n'.format(
                    ', '.join(str(i) for i in indices),
                    utils._format_array_props(inputs)))

        self.check_forward_outputs(
            tuple([var.array for var in outputs]),
            cpu_expected)

    def run_test_backward(self, backend_config):
        # Runs the backward test.
        if self.skip_backward_test:
            raise unittest.SkipTest('skip_backward_test is set')

        # avoid cyclic import
        from chainer import gradient_check

        self.backend_config = backend_config
        self.test_name = 'test_backward'
        self.before_test(self.test_name)

        def f(*args):
            # Forward wrapper passed to the numerical gradient checker.
            return self._forward(args, backend_config)

        def do_check():
            # One round of numerical gradient checking; may raise
            # NondifferentiableError when detection is enabled.
            inputs = self._generate_inputs()
            outputs = self._forward_expected(inputs)
            grad_outputs = self._generate_grad_outputs(outputs)

            inputs = backend_config.get_array(inputs)
            grad_outputs = backend_config.get_array(grad_outputs)
            inputs = self._to_noncontiguous_as_needed(inputs)
            grad_outputs = self._to_noncontiguous_as_needed(grad_outputs)

            with FunctionTestError.raise_if_fail(
                    'backward is not implemented correctly'):
                gradient_check.check_backward(
                    f, inputs, grad_outputs, dtype=numpy.float64,
                    detect_nondifferentiable=self.dodge_nondifferentiable,
                    **self.check_backward_options)

        if self.dodge_nondifferentiable:
            # Resample inputs until a differentiable point is found.
            while True:
                try:
                    do_check()
                except gradient_check.NondifferentiableError:
                    continue
                else:
                    break
        else:
            do_check()

    def run_test_double_backward(self, backend_config):
        # Runs the double-backward test.
        if self.skip_double_backward_test:
            raise unittest.SkipTest('skip_double_backward_test is set')

        # avoid cyclic import
        from chainer import gradient_check

        self.backend_config = backend_config
        self.test_name = 'test_double_backward'
        self.before_test(self.test_name)

        def f(*args):
            # Forward wrapper passed to the numerical gradient checker.
            return self._forward(args, backend_config)

        def do_check():
            # One round of second-order numerical gradient checking.
            inputs = self._generate_inputs()
            outputs = self._forward_expected(inputs)
            grad_outputs = self._generate_grad_outputs(outputs)
            grad_grad_inputs = self._generate_grad_grad_inputs(inputs)

            # Drop ggx corresponding to non-differentiable inputs.
            grad_grad_inputs = [
                ggx for ggx in grad_grad_inputs if ggx.dtype.kind == 'f']

            inputs = backend_config.get_array(inputs)
            grad_outputs = backend_config.get_array(grad_outputs)
            grad_grad_inputs = backend_config.get_array(grad_grad_inputs)
            inputs = self._to_noncontiguous_as_needed(inputs)
            grad_outputs = self._to_noncontiguous_as_needed(grad_outputs)
            grad_grad_inputs = (
                self._to_noncontiguous_as_needed(grad_grad_inputs))

            with backend_config:
                with FunctionTestError.raise_if_fail(
                        'double backward is not implemented correctly'):
                    gradient_check.check_double_backward(
                        f, inputs, grad_outputs, grad_grad_inputs,
                        dtype=numpy.float64,
                        detect_nondifferentiable=self.dodge_nondifferentiable,
                        **self.check_double_backward_options)

        if self.dodge_nondifferentiable:
            # Resample inputs until a differentiable point is found.
            while True:
                try:
                    do_check()
                except gradient_check.NondifferentiableError:
                    continue
                else:
                    break
        else:
            do_check()
class FunctionTestCase(FunctionTestBase, unittest.TestCase):
    """A base class for function test cases.

    Function test cases can inherit from this class to define a set of function
    tests.

    .. rubric:: Required methods

    Each concrete class must at least override the following three methods.

    ``forward(self, inputs, device)``
        Implements the target forward function.
        ``inputs`` is a tuple of :class:`~chainer.Variable`\\ s.
        This method is expected to return the output
        :class:`~chainer.Variable`\\ s with the same array types as the inputs.
        ``device`` is the device corresponding to the input arrays.

    ``forward_expected(self, inputs)``
        Implements the expectation of the target forward function.
        ``inputs`` is a tuple of :class:`numpy.ndarray`\\ s.
        This method is expected to return the output
        :class:`numpy.ndarray`\\ s.

    ``generate_inputs(self)``
        Returns a tuple of input arrays of type :class:`numpy.ndarray`.

    .. rubric:: Optional methods

    Additionally the concrete class can override the following methods.

    ``before_test(self, test_name)``
        A callback method called before each test.
        Typically a skip logic is implemented by conditionally raising
        :class:`unittest.SkipTest`.
        ``test_name`` is one of ``'test_forward'``, ``'test_backward'``, and
        ``'test_double_backward'``.

    ``generate_grad_outputs(self, outputs_template)``
        Returns a tuple of output gradient arrays of type
        :class:`numpy.ndarray`.
        ``outputs_template`` is a tuple of template arrays. The returned arrays
        are expected to have the same shapes and dtypes as the template arrays.

    ``generate_grad_grad_inputs(self, inputs_template)``
        Returns a tuple of the second order input gradient arrays of type
        :class:`numpy.ndarray`.
        ``input_template`` is a tuple of template arrays. The returned arrays
        are expected to have the same shapes and dtypes as the template arrays.

    ``check_forward_outputs(self, outputs, expected_outputs)``
        Implements check logic of forward outputs. Typically additional check
        can be done after calling ``super().check_forward_outputs``.
        ``outputs`` and ``expected_outputs`` are tuples of arrays.
        In case the check fails, ``FunctionTestError`` should be raised.

    .. rubric:: Configurable attributes

    The concrete class can override the following attributes to control the
    behavior of the tests.

    ``skip_forward_test`` (bool):
        Whether to skip forward computation test. ``False`` by default.

    ``skip_backward_test`` (bool):
        Whether to skip backward computation test. ``False`` by default.

    ``skip_double_backward_test`` (bool):
        Whether to skip double-backward computation test. ``False`` by default.

    ``dodge_nondifferentiable`` (bool):
        Enable non-differentiable point detection in numerical gradient
        calculation. If the inputs returned by ``generate_inputs`` turns
        out to be a non-differentiable point, the test will repeatedly resample
        inputs until a differentiable point will be finally sampled.
        ``False`` by default.

    ``contiguous`` (None or 'C'):
        Specifies the contiguousness of incoming arrays (i.e. inputs, output
        gradients, and the second order input gradients). If ``None``, the
        arrays will be non-contiguous as long as possible. If ``'C'``, the
        arrays will be C-contiguous. ``None`` by default.

    .. rubric:: Passive attributes

    These attributes are automatically set.

    ``test_name`` (str):
        The name of the test being run. It is one of ``'test_forward'``,
        ``'test_backward'``, and ``'test_double_backward'``.

    ``backend_config`` (:class:`~chainer.testing.BackendConfig`):
        The backend configuration.

    .. note::

        This class assumes :func:`chainer.testing.inject_backend_tests`
        is used together. See the example below.

    .. admonition:: Example

        .. testcode::

            @chainer.testing.inject_backend_tests(
                None,
                [
                    {},  # CPU
                    {'use_cuda': True},  # GPU
                ])
            class TestReLU(chainer.testing.FunctionTestCase):

                # ReLU function has a non-differentiable point around zero, so
                # dodge_nondifferentiable should be set to True.
                dodge_nondifferentiable = True

                def generate_inputs(self):
                    x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
                    return x,

                def forward(self, inputs, device):
                    x, = inputs
                    return F.relu(x),

                def forward_expected(self, inputs):
                    x, = inputs
                    expected = x.copy()
                    expected[expected < 0] = 0
                    return expected,

    .. seealso:: :class:`~chainer.testing.LinkTestCase`

    """

    def test_forward(self, backend_config):
        """Tests forward computation."""
        # Delegates to FunctionTestBase.run_test_forward.
        self.run_test_forward(backend_config)

    def test_backward(self, backend_config):
        """Tests backward computation."""
        # Delegates to FunctionTestBase.run_test_backward.
        self.run_test_backward(backend_config)

    def test_double_backward(self, backend_config):
        """Tests double-backward computation."""
        # Delegates to FunctionTestBase.run_test_double_backward.
        self.run_test_double_backward(backend_config)
class _LinkTestBase(object):
    """Shared implementation for link test cases.

    Provides link creation, input generation, forward execution and
    contiguousness handling used by the concrete link test case classes.
    """

    # Current backend configuration; set by the concrete test methods.
    backend_config = None
    # None: make incoming arrays non-contiguous; 'C': keep them C-contiguous.
    contiguous = None

    # List of parameter names represented as strings.
    # I.e. ('gamma', 'beta') for BatchNormalization.
    param_names = ()

    def before_test(self, test_name):
        """Callback invoked before each test; override to implement skips."""
        pass

    def generate_params(self):
        """Returns a tuple of initializer-likes. Must be overridden."""
        raise NotImplementedError('generate_params is not implemented.')

    def generate_inputs(self):
        """Returns a tuple of input numpy arrays. Must be overridden."""
        raise NotImplementedError('generate_inputs is not implemented.')

    def create_link(self, initializers):
        """Creates the target link from the initializers. Must be
        overridden."""
        raise NotImplementedError('create_link is not implemented.')

    def forward(self, link, inputs, device):
        """Default forward implementation: calls the link on the inputs and
        normalizes the result to a tuple."""
        outputs = link(*inputs)
        if not isinstance(outputs, tuple):
            outputs = outputs,
        return outputs

    def check_forward_outputs(self, outputs, expected_outputs):
        """Compares actual and expected forward outputs.

        Raises ``LinkTestError`` on mismatch.  Note that
        ``check_forward_options`` is provided by the concrete subclass.
        """
        assert isinstance(outputs, tuple)
        assert isinstance(expected_outputs, tuple)
        assert all(isinstance(a, chainer.get_array_types()) for a in outputs)
        assert all(
            isinstance(a, chainer.get_array_types()) for a in expected_outputs)
        _check_arrays_equal(
            outputs, expected_outputs, LinkTestError,
            **self.check_forward_options)

    def _generate_params(self):
        # Wraps generate_params with result validation.
        params_init = self.generate_params()

        if not isinstance(params_init, (tuple, list)):
            raise TypeError(
                '`generate_params` must return a tuple or a list.')
        for init in params_init:
            _check_generated_initializer(init)
        return params_init

    def _generate_inputs(self):
        # Wraps generate_inputs with type checking (must be CPU arrays).
        inputs = self.generate_inputs()
        _check_array_types(inputs, backend.CpuDevice(), 'generate_inputs')
        return inputs

    def _create_link(self, initializers, backend_config):
        # Wraps create_link with type checking and device transfer.
        link = self.create_link(initializers)
        if not isinstance(link, chainer.Link):
            raise TypeError(
                '`create_link` must return a chainer.Link object.')
        link.to_device(backend_config.device)
        return link

    def _create_initialized_link(self, inits, backend_config):
        # Creates a link and runs a single forward pass so that all of its
        # parameters become initialized; gradients are cleared afterwards.
        inits = [_get_initializer_argument_value(i) for i in inits]
        link = self._create_link(inits, backend_config)

        # Generate inputs and compute a forward pass to initialize the
        # parameters.
        inputs_np = self._generate_inputs()
        inputs_xp = backend_config.get_array(inputs_np)
        inputs_xp = self._to_noncontiguous_as_needed(inputs_xp)
        input_vars = [chainer.Variable(i) for i in inputs_xp]
        output_vars = self._forward(link, input_vars, backend_config)
        outputs_xp = [v.array for v in output_vars]
        link.cleargrads()
        return link, inputs_xp, outputs_xp

    def _forward(self, link, inputs, backend_config):
        # Runs the user-defined forward under the backend configuration and
        # checks that the outputs are variables on the expected device.
        assert all(isinstance(x, chainer.Variable) for x in inputs)
        with backend_config:
            outputs = self.forward(link, inputs, backend_config.device)
        _check_variable_types(
            outputs, backend_config.device, 'forward', LinkTestError)
        return outputs

    def _to_noncontiguous_as_needed(self, contig_arrays):
        # Applies the `contiguous` policy to the given (C-contiguous) arrays.
        if self.contiguous is None:
            # non-contiguous
            return array_module._as_noncontiguous_array(contig_arrays)
        if self.contiguous == 'C':
            # C-contiguous
            return contig_arrays
        assert False, (
            'Invalid value of `contiguous`: {}'.format(self.contiguous))
class LinkTestCase(_LinkTestBase, unittest.TestCase):
    """A base class for link forward and backward test cases.

    Link test cases can inherit from this class to define a set of link tests
    for forward and backward computations.

    .. rubric:: Required methods

    Each concrete class must at least override the following methods.

    ``generate_params(self)``
        Returns a tuple of initializers-likes. The tuple should contain an
        initializer-like for each initializer-like argument, i.e. the
        parameters to the link constructor. These will be passed to
        ``create_link``.

    ``create_link(self, initializers)``
        Returns a link. The link should be initialized with the given
        initializer-likes ``initializers``. ``initializers`` is a tuple of
        same length as the number of parameters.

    ``generate_inputs(self)``
        Returns a tuple of input arrays of type :class:`numpy.ndarray`.

    ``forward(self, link, inputs, device)``
        Implements the target forward function.
        ``link`` is a link created by ``create_link`` and
        ``inputs`` is a tuple of :class:`~chainer.Variable`\\ s.
        This method is expected to return the output
        :class:`~chainer.Variable`\\ s with the same array types as the inputs.
        ``device`` is the device corresponding to the input arrays.
        A default implementation is provided for links that only takes the
        inputs defined in ``generate_inputs`` (wrapped in
        :class:`~chainer.Variable`\\ s) and returns nothing but output
        :class:`~chainer.Variable`\\ s in its forward computation.

    .. rubric:: Optional methods

    Each concrete class may override the following methods depending on the
    skip flags ``skip_forward_test`` and ``skip_backward_test``.

    ``before_test(self, test_name)``
        A callback method called before each test.
        Typically a skip logic is implemented by conditionally raising
        :class:`unittest.SkipTest`.
        ``test_name`` is one of ``'test_forward'`` and ``'test_backward'``.

    ``forward_expected(self, link, inputs)``
        Implements the expectation of the target forward function.
        ``link`` is the initialized link that was used to compute the actual
        forward which the results of this method will be compared against.
        The link is guaranteed to reside on the CPU.
        ``inputs`` is a tuple of :class:`numpy.ndarray`\\ s.
        This method is expected to return the output
        :class:`numpy.ndarray`\\ s.
        This method must be implemented if either ``skip_forward_test`` or
        ``skip_backward_test`` is ``False`` in which case forward or backward
        tests are executed.

    ``generate_grad_outputs(self, outputs_template)``
        Returns a tuple of output gradient arrays of type
        :class:`numpy.ndarray`.
        ``outputs_template`` is a tuple of template arrays. The returned arrays
        are expected to have the same shapes and dtypes as the template arrays.

    ``check_forward_outputs(self, outputs, expected_outputs)``
        Implements check logic of forward outputs. Typically additional check
        can be done after calling ``super().check_forward_outputs``.
        ``outputs`` and ``expected_outputs`` are tuples of arrays.
        In case the check fails, ``LinkTestError`` should be raised.

    .. rubric:: Attributes

    The concrete class can override the following attributes to control the
    behavior of the tests.

    ``param_names`` (tuple of str):
        A tuple of strings with all the names of the parameters that should be
        tested. E.g. ``('gamma', 'beta')`` for the batch normalization link.
        ``()`` by default.

    ``skip_forward_test`` (bool):
        Whether to skip forward computation test. ``False`` by default.

    ``skip_backward_test`` (bool):
        Whether to skip backward computation test. ``False`` by default.

    ``dodge_nondifferentiable`` (bool):
        Enable non-differentiable point detection in numerical gradient
        calculation. If the data returned by
        ``generate_params``, ``create_link`` and ``generate_inputs`` turns out
        to be a non-differentiable point, the test will repeatedly resample
        those until a differentiable point will be finally sampled. ``False``
        by default.

    ``contiguous`` (None or 'C'):
        Specifies the contiguousness of incoming arrays (i.e. inputs,
        parameters and gradients. If ``None``, the
        arrays will be non-contiguous as long as possible. If ``'C'``, the
        arrays will be C-contiguous. ``None`` by default.

    .. note::

        This class assumes :func:`chainer.testing.inject_backend_tests`
        is used together. See the example below.

    .. note::

        When implementing :class:`~chainer.testing.LinkTestCase` and
        :class:`~chainer.testing.LinkInitializersTestCase` to test both
        forward/backward and initializers, it is often convenient to refactor
        out common logic in a separate class.

    .. admonition:: Example

        .. testcode::

            @chainer.testing.inject_backend_tests(
                None,
                [
                    {},  # CPU
                    {'use_cuda': True},  # GPU
                ])
            class TestLinear(chainer.testing.LinkTestCase):

                param_names = ('W', 'b')

                def generate_params(self):
                    initialW = numpy.random.uniform(
                        -1, 1, (3, 2)).astype(numpy.float32)
                    initial_bias = numpy.random.uniform(
                        -1, 1, (3,)).astype(numpy.float32)
                    return initialW, initial_bias

                def generate_inputs(self):
                    x = numpy.random.uniform(
                        -1, 1, (1, 2)).astype(numpy.float32)
                    return x,

                def create_link(self, initializers):
                    initialW, initial_bias = initializers
                    link = chainer.links.Linear(
                        2, 3, initialW=initialW, initial_bias=initial_bias)
                    return link

                def forward(self, link, inputs, device):
                    x, = inputs
                    return link(x),

                def forward_expected(self, link, inputs):
                    W = link.W.array
                    b = link.b.array
                    x, = inputs
                    expected = x.dot(W.T) + b
                    return expected,

    .. seealso::
        :class:`~chainer.testing.LinkInitializersTestCase`
        :class:`~chainer.testing.FunctionTestCase`

    """

    # Option dicts forwarded to the check routines; replaced by fresh dicts
    # in __init__ to avoid sharing mutable state between instances.
    check_forward_options = None
    check_backward_options = None
    # Flags to skip the individual tests.
    skip_forward_test = False
    skip_backward_test = False
    # If True, data is resampled whenever a non-differentiable point is hit.
    dodge_nondifferentiable = False

    def __init__(self, *args, **kwargs):
        self.check_forward_options = {}
        self.check_backward_options = {}
        super(LinkTestCase, self).__init__(*args, **kwargs)

    def forward_expected(self, link, inputs):
        """Computes the expected forward results. Must be overridden."""
        raise NotImplementedError('forward_expected() is not implemented.')

    def generate_grad_outputs(self, outputs_template):
        """Returns random output gradients matching the template
        shapes/dtypes."""
        grad_outputs = tuple([
            numpy.random.uniform(-1, 1, a.shape).astype(a.dtype)
            for a in outputs_template])
        return grad_outputs

    def test_forward(self, backend_config):
        """Tests forward computation."""
        if self.skip_forward_test:
            raise unittest.SkipTest('skip_forward_test is set')

        self.backend_config = backend_config
        self.before_test('test_forward')

        inits = self._generate_params()
        link = self._create_link(inits, backend_config)

        inputs_np = self._generate_inputs()
        inputs_xp = backend_config.get_array(inputs_np)
        inputs_xp = self._to_noncontiguous_as_needed(inputs_xp)
        input_vars = tuple([chainer.Variable(i) for i in inputs_xp])

        # Compute forward of the link and initialize its parameters.
        output_vars = self._forward(link, input_vars, backend_config)
        outputs_xp = [v.array for v in output_vars]

        # Expected outputs are computed on the CPU so the link must be
        # transferred.
        link.to_device(backend.CpuDevice())
        expected_outputs_np = self._forward_expected(link, inputs_np)

        self.check_forward_outputs(
            tuple(outputs_xp), expected_outputs_np)

    def test_backward(self, backend_config):
        """Tests backward computation."""
        if self.skip_backward_test:
            raise unittest.SkipTest('skip_backward_test is set')

        self.backend_config = backend_config
        self.before_test('test_backward')

        # avoid cyclic import
        from chainer import gradient_check

        def do_check():
            # Generate an initialized temporary link that is already forward
            # propagated. This link is only used to generate necessary data,
            # i.e. inputs, outputs and parameters for the later gradient check
            # and the link itself will be discarded.
            inits = self._generate_params()
            link, inputs, outputs = self._create_initialized_link(
                inits, backend_config)

            # Extract the parameter ndarrays from the initialized link.
            params = _get_link_params(link, self.param_names)
            params = [p.array for p in params]

            # Prepare inputs, outputs and upstream gradients for the gradient
            # check.
            cpu_device = backend.CpuDevice()
            outputs = [cpu_device.send(output) for output in outputs]
            grad_outputs = self._generate_grad_outputs(outputs)
            grad_outputs = backend_config.get_array(grad_outputs)

            inputs = self._to_noncontiguous_as_needed(inputs)
            params = self._to_noncontiguous_as_needed(params)
            grad_outputs = self._to_noncontiguous_as_needed(grad_outputs)

            # Create the link used for the actual forward propagation in the
            # gradient check.
            forward_link, _, _ = self._create_initialized_link(
                inits, backend_config)

            def forward(inputs, ps):
                # Use generated parameters.
                with forward_link.init_scope():
                    for param_name, p in zip(self.param_names, ps):
                        setattr(forward_link, param_name, p)
                return self._forward(forward_link, inputs, backend_config)

            with LinkTestError.raise_if_fail(
                    'backward is not implemented correctly'):
                gradient_check._check_backward_with_params(
                    forward, inputs, grad_outputs, params=params,
                    dtype=numpy.float64,
                    detect_nondifferentiable=self.dodge_nondifferentiable,
                    **self.check_backward_options)

        if self.dodge_nondifferentiable:
            # Resample data until a differentiable point is found.
            while True:
                try:
                    do_check()
                except gradient_check.NondifferentiableError:
                    continue
                else:
                    break
        else:
            do_check()

    def _forward_expected(self, link, inputs):
        # Wraps forward_expected with validation.
        assert all(isinstance(x, numpy.ndarray) for x in inputs)

        outputs = self.forward_expected(link, inputs)
        # NOTE(review): this re-validates `inputs` (already asserted above)
        # under the label 'test_forward'; it looks like it was meant to check
        # `outputs` under 'forward_expected', as FunctionTestBase's
        # _forward_expected does -- confirm upstream before changing.
        _check_array_types(inputs, backend.CpuDevice(), 'test_forward')
        return outputs

    def _generate_grad_outputs(self, outputs_template):
        # Wraps generate_grad_outputs with type checking.
        assert all(isinstance(x, numpy.ndarray) for x in outputs_template)
        grad_outputs = self.generate_grad_outputs(outputs_template)
        _check_array_types(
            grad_outputs, backend.CpuDevice(), 'generate_grad_outputs')
        return grad_outputs
class LinkInitializersTestCase(_LinkTestBase, unittest.TestCase):
"""A base class for link parameter initializer test cases.
Link test cases can inherit from this class to define a set of link tests
for parameter initialization.
.. rubric:: Required methods
Each concrete class must at least override the following methods.
``generate_params(self)``
Returns a tuple of initializers-likes. The tuple should contain an
initializer-like for each initializer-like argument, i.e. the
parameters to the link constructor. These will be passed to
``create_link``.
``create_link(self, initializers)``
Returns a link. The link should be initialized with the given
initializer-likes ``initializers``. ``initializers`` is a tuple of
same length as the number of parameters.
``generate_inputs(self)``
Returns a tuple of input arrays of type :class:`numpy.ndarray`.
``forward(self, link, inputs, device)``
Implements the target forward function.
``link`` is a link created by ``create_link`` and
``inputs`` is a tuple of :class:`~chainer.Variable`\\ s.
This method is expected to return the output
:class:`~chainer.Variable`\\ s with the same array types as the inputs.
``device`` is the device corresponding to the input arrays.
A default implementation is provided for links that only takes the
inputs defined in ``generate_inputs`` (wrapped in
:class:`~chainer.Variable`\\ s) and returns nothing but output
:class:`~chainer.Variable`\\ s in its forward computation.
``get_initializers(self)``
Returns a tuple with the same length as the number of initializers that
the constructor of the link accepts. Each element in the tuple is a
container itself, listing all initializers-likes that should be tested.
Each initializer-like in the tuple is tested one at a time by being
passed to ``create_link``. When the length of the tuple is greater than
one (i.e. if the link accepts multiple initializers), the ones not
being tested are replaced by the ones returned by `generate_params`.
Initializer-likes returned here should be deterministic since test will
invoke them multiple times to test the correctness.
For testing initializer arguments that can be non-initializer values
such as ``None``, one can use the ``InitializerArgument``, defining a
pair of the link constructor argument and actual initializer-like used
by the link.
This method must be implemented if ``skip_initializers_test`` is
``False`` in which case the initializers test is executed.
.. rubric:: Optional methods
Each concrete class may override the following methods.
``before_test(self, test_name)``
A callback method called before each test.
Typically a skip logic is implemented by conditionally raising
:class:`unittest.SkipTest`.
``test_name`` is always of ``'test_initializers'``.
.. rubric:: Attributes
The concrete class can override the following attributes to control the
behavior of the tests.
``param_names`` (list of str):
A list of strings with all the names of the parameters that should be
tested. E.g. ``['gamma', 'beta']`` for the batch normalization link.
``[]`` by default.
``contiguous`` (None or 'C'):
Specifies the contiguousness of incoming arrays (i.e. inputs,
parameters and gradients. If ``None``, the
arrays will be non-contiguous as long as possible. If ``'C'``, the
arrays will be C-contiguous. ``None`` by default.
.. note::
This class assumes :func:`chainer.testing.inject_backend_tests`
is used together. See the example below.
.. note::
When implementing :class:`~chainer.testing.LinkTestCase` and
:class:`~chainer.testing.LinkInitializersTestCase` to test both
forward/backward and initializers, it is often convenient to refactor
out common logic in a separate class.
.. admonition:: Example
.. testcode::
@chainer.testing.inject_backend_tests(
None,
[
{}, # CPU
{'use_cuda': True}, # GPU
])
class TestLinear(chainer.testing.LinkInitializersTestCase):
param_names = ['W', 'b']
def generate_params(self):
initialW = numpy.random.uniform(
-1, 1, (3, 2)).astype(numpy.float32)
initial_bias = numpy.random.uniform(
-1, 1, (3,)).astype(numpy.float32)
return initialW, initial_bias
def generate_inputs(self):
x = numpy.random.uniform(
-1, 1, (1, 2)).astype(numpy.float32)
return x,
def create_link(self, initializers):
initialW, initial_bias = initializers
link = chainer.links.Linear(
2, 3, initialW=initialW, initial_bias=initial_bias)
return link
def forward(self, link, inputs, device):
x, = inputs
return link(x),
def get_initializers(self):
initialW = [initializers.Constant(1), 2]
initial_bias = [initializers.Constant(2), 3,
chainer.testing.link.InitializerArgument(None, 0)]
return initialW, initial_bias
.. seealso::
:class:`~chainer.testing.LinkTestCase`
:class:`~chainer.testing.FunctionTestCase`
"""
check_initializers_options = None
def __init__(self, *args, **kwargs):