from chainer import cuda
from chainer import function_node
from chainer.utils import type_check


_kern = None
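

# The elementwise CUDA kernel is compiled lazily on first use and cached in
# the module-level ``_kern``.  The same kernel serves both the forward and
# the backward pass: ``cond`` carries the array whose sign selects the
# branch, and ``x`` carries the values that are scaled by ``slope`` where
# ``cond`` is negative.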
def _get_kern():
    global _kern
    if _kern is None:
        _kern = cuda.elementwise(
            'T cond, T x, T slope', 'T y',
            'y = cond >= 0 ? x : (T)(slope * x)', 'lrelu')
    return _kern


class LeakyReLU(function_node.FunctionNode):

    """Leaky rectifier unit."""

    def __init__(self, slope=0.2):
        self.slope = slope

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)
        x_type, = in_types
        type_check.expect(x_type.dtype.kind == 'f')

    def forward_cpu(self, inputs):
        x, = inputs
        y = x.copy()
        y[x < 0] *= self.slope
        # The backward pass only needs a sign mask: when slope >= 0 the
        # output is retained and its sign is used; otherwise the input
        # itself is retained.
        if self.slope >= 0:
            self.retain_outputs((0,))
        else:
            self.retain_inputs((0,))
        return y,

    def forward_gpu(self, inputs):
        x, = inputs
        y = _get_kern()(x, x, self.slope)
        if self.slope >= 0:
            self.retain_outputs((0,))
        else:
            self.retain_inputs((0,))
        return y,

    def backward(self, indexes, grad_outputs):
        if self.slope >= 0:
            x = None
            y = self.get_retained_outputs()[0].data
        else:
            x = self.get_retained_inputs()[0].data
            y = None
        return _LeakyReLUGrad(x, y, self.slope).apply(grad_outputs)
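

# The gradient node below scales the incoming gradient by ``slope`` where
# the retained x (or y) is negative and leaves it unchanged elsewhere.
# Because that mask does not depend on the gradient itself, the node's own
# backward can apply the same masking again, which is what enables double
# backpropagation through leaky_relu.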
class _LeakyReLUGrad(function_node.FunctionNode):

    def __init__(self, x, y, slope):
        self.slope = slope
        self.x = x
        self.y = y

    def forward_cpu(self, inputs):
        gy, = inputs
        gy = gy.copy()
        if self.slope >= 0:
            gy[self.y < 0] *= self.slope
        else:
            gy[self.x < 0] *= self.slope
        return gy,

    def forward_gpu(self, inputs):
        gy, = inputs
        if self.slope >= 0:
            gy = _get_kern()(self.y, gy, self.slope)
        else:
            gy = _get_kern()(self.x, gy, self.slope)
        return gy,

    def backward(self, indexes, grad_outputs):
        return _LeakyReLUGrad(self.x, self.y, self.slope).apply(grad_outputs)


def leaky_relu(x, slope=0.2):
    """Leaky Rectified Linear Unit function.

    This function is expressed as

    .. math::

        f(x) = \\left \\{ \\begin{array}{ll}
        x  & {\\rm if}~ x \\ge 0 \\\\
        ax & {\\rm if}~ x < 0,
        \\end{array} \\right.

    where :math:`a` is a configurable slope value.

    Args:
        x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
        :class:`cupy.ndarray`):
            Input variable. A :math:`(s_1, s_2, ..., s_N)`-shaped float array.
        slope (float): Slope value :math:`a`.

    Returns:
        ~chainer.Variable: Output variable. A
        :math:`(s_1, s_2, ..., s_N)`-shaped float array.

    .. admonition:: Example

        >>> x = np.array([[-1, 0], [2, -3], [-2, 1]], 'f')
        >>> x
        array([[-1.,  0.],
               [ 2., -3.],
               [-2.,  1.]], dtype=float32)
        >>> F.leaky_relu(x, slope=0.2).data
        array([[-0.2       ,  0.        ],
               [ 2.        , -0.60000002],
               [-0.40000001,  1.        ]], dtype=float32)

    """
    return LeakyReLU(slope).apply((x,))[0]
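

if __name__ == '__main__':
    # Minimal usage sketch (illustrative only; assumes NumPy is available).
    # It mirrors the docstring example above on the CPU path.
    import numpy as np

    x = np.array([[-1, 0], [2, -3], [-2, 1]], 'f')
    print(leaky_relu(x, slope=0.2).data)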