leaky_relu_test.rb
# frozen_string_literal: true

require 'chainer/functions/activation/leaky_relu'

class Chainer::Functions::Activation::LeakyReLUTest < Test::Unit::TestCase
  data = {
    'test1' => {shape: [3, 2], dtype: xm::SFloat},
    'test2' => {shape: [], dtype: xm::SFloat},
    'test3' => {shape: [3, 2], dtype: xm::DFloat},
    'test4' => {shape: [], dtype: xm::DFloat}
  }
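
  # Each entry parameterizes the tests below over the array shape ([] is a
  # 0-dim scalar array) and the floating-point precision (single vs. double).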

  def _setup(data)
    @shape = data[:shape]
    @dtype = data[:dtype]

    # Use a fixed seed so the "nearly_eq().all?" assertions do not fail spuriously.
    @dtype.srand(1)
    @x = @dtype.new(@shape).rand(2) - 1
    # Avoid instability of the numerical gradient by nudging values near the
    # leaky ReLU kink at zero away from it.
    near_zero = ((@x > -0.05) & (@x < 0.05)).where
    @x[near_zero] = 0.5 if @x.ndim > 0 && near_zero.size > 0
    @gy = @dtype.new(@shape).rand(2) - 1
    @slope = Random.rand
    @check_forward_options = {}
    @check_backward_options_dtype = xm::DFloat
  end
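
  # Forward check: leaky_relu should leave non-negative entries unchanged and
  # scale negative entries by @slope, within nearly_eq tolerance.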
  def check_forward(x_data)
    x = Chainer::Variable.new(x_data)
    y = Chainer::Functions::Activation::LeakyReLU.leaky_relu(x, slope: @slope)
    assert_equal(@dtype, y.data.class)

    # Reference output, computed element-wise from @x.
    expected = @x.dup
    if expected.shape == []
      expected[expected < 0] *= @slope
    else
      @x.each_with_index do |v, *i|
        expected[*i] *= @slope if v < 0
      end
    end

    assert_true(y.data.nearly_eq(expected).all?)
  end

  data(data)
  def test_forward(data)
    _setup(data)
    check_forward(@x.dup)
  end
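
  # Backward check: Chainer::check_backward compares the analytic gradient of
  # LeakyReLU against a numerical gradient; DFloat is passed as the dtype,
  # presumably so the numerical estimate is computed in double precision.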
  def check_backward(x_data, y_grad)
    func = Chainer::Functions::Activation::LeakyReLU.new(slope: @slope)
    Chainer::check_backward(func, x_data, y_grad, dtype: @check_backward_options_dtype)
  end

  data(data)
  def test_backward(data)
    _setup(data)
    check_backward(@x.dup, @gy.dup)
  end
end