-
Notifications
You must be signed in to change notification settings - Fork 1
/
softmax.py
58 lines (50 loc) · 2.06 KB
/
softmax.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import numpy as np
class Softmax(object):
    """Softmax output layer with cross-entropy loss and its gradient.

    With ``is_zero_pad=True``, rows of ``target`` that are all (near) zero
    are treated as padding: they contribute no gradient in :meth:`backward`
    and never count as errors in :meth:`error`.
    """

    def __init__(self, is_zero_pad=False):
        # Small constant keeping log() finite when a predicted probability is 0.
        self.eps = 1e-6
        self.is_zero_pad = is_zero_pad

    def forward(self, x):
        """Return softmax probabilities along the last axis of ``x``.

        Subtracts the row-wise max before exponentiating so large logits do
        not overflow; this leaves the result mathematically unchanged.
        """
        e = np.exp(x - np.max(x, axis=-1, keepdims=True))
        return e / np.sum(e, axis=-1, keepdims=True)

    def backward(self, y, target):
        """Gradient of cross-entropy w.r.t. the softmax input: ``y - target``.

        When ``is_zero_pad`` is set, rows whose target sums to < 0.5
        (i.e. all-zero padding rows) yield a zero gradient.
        """
        if self.is_zero_pad:
            input_size = y.shape[-1]
            # Integer division: '/' yields a float in Python 3, which
            # reshape() rejects as a dimension.
            batch_size = int(np.prod(y.shape)) // input_size
            y_dot = y.reshape((batch_size, input_size))
            t_dot = target.reshape((batch_size, input_size))
            # Row mask: 1.0 for real rows (target mass >= 0.5), 0.0 for
            # padding.  Element-wise multiply replaces the original
            # O(batch^2) np.diag(...) matrix product.
            mask = (np.sum(t_dot, axis=-1) >= 0.5).astype(y_dot.dtype)
            y_dot = y_dot * mask[:, np.newaxis]
            return (y_dot - t_dot).reshape(y.shape)
        else:
            return y - target

    def loss(self, y, target):
        """Cross-entropy loss summed over all elements (eps avoids log(0))."""
        return - np.sum(np.log(y + self.eps) * target)

    def error(self, y, target):
        """Return the number of misclassified non-padding rows.

        A row is correct when ``argmax(y)`` lands on a target entry >= 0.5,
        or when the target row sums to <= 0.5 (a padding row, never an error).
        """
        input_size = y.shape[-1]
        # Same float-division fix as in backward().
        batch_size = int(np.prod(y.shape)) // input_size
        y_dot = y.reshape((batch_size, input_size))
        t_dot = target.reshape((batch_size, input_size))
        correct = np.sum([
            (1.0 if np.sum(t_dot[i]) <= 0.5 or t_dot[i][v] >= 0.5 else 0.0)
            for i, v in enumerate(np.argmax(y_dot, axis=-1))
        ])
        return batch_size - correct

    def __test(self):
        '''
        >>> x = np.log( np.array([[1, 1], [12, 6], [3, 8]]) )
        >>> t = np.array([[1, 0], [0, 1], [1, 0]])
        >>> f = Softmax()
        >>> y = f.forward( x )
        >>> print([['%.2f'%_ for _ in v] for v in y])
        [['0.50', '0.50'], ['0.67', '0.33'], ['0.27', '0.73']]
        >>> d = f.backward(y, t)
        >>> print([['%.2f'%_ for _ in v] for v in d])
        [['-0.50', '0.50'], ['0.67', '-0.67'], ['-0.73', '0.73']]
        >>> l = f.loss(y, t)
        >>> print('%.2f'%l)
        3.09
        >>> f = Softmax(is_zero_pad=True)
        >>> t = np.array([[1, 0], [0, 1], [0, 0]])
        >>> d = f.backward(y, t)
        >>> print([['%.2f'%_ for _ in v] for v in d])
        [['-0.50', '0.50'], ['0.67', '-0.67'], ['0.00', '0.00']]
        '''
        pass
if __name__ == "__main__":
    # Run the embedded doctests (e.g. in Softmax.__test) when this file is
    # executed directly as a script; importing the module runs nothing.
    import doctest
    doctest.testmod()