-
Notifications
You must be signed in to change notification settings - Fork 0
/
derivative.go
100 lines (88 loc) · 2.05 KB
/
derivative.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
package layer
import (
"math"
"math/rand"
"github.com/jmpargana/matrix"
)
// DerivativeFunctions is a map of the derivatives of the available activation functions.
// It uses the formulas defined in https://en.wikipedia.org/wiki/Activation_function.
//
// Keys mirror the activation names used elsewhere in this package; each value
// applies the corresponding derivative element-wise to a matrix.
// NOTE(review): "softmax" maps to an element-wise approximation only — see
// the TODO on dSoftmax.
var DerivativeFunctions = map[string]fnType{
	"relu":        dRelu,
	"identity":    dIdentity,
	"binary_step": dBinaryStep,
	"sigmoid":     dSigmoid,
	"tanh":        dTanH,
	"lrelu":       dLReLU,
	"rrelu":       dRReLU,
	"arctan":      dArcTan,
	"softmax":     dSoftmax,
}
// dIdentity is the derivative of the identity activation.
// f(x) = x, therefore f'(x) = 1 for every element.
func dIdentity(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(_ float64) float64 {
		return 1
	})
	return
}
// dRelu is the derivative of the rectified linear unit:
// 1 on the positive side, 0 elsewhere (the subgradient 0 is used at x == 0).
func dRelu(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		if x > 0 {
			return 1
		}
		return 0
	})
	return
}
// dBinaryStep is the derivative of the binary step activation.
// The step function is flat everywhere except at its discontinuity, so the
// derivative is 0 for x != 0. At exactly x == 0 the derivative is undefined
// (see https://en.wikipedia.org/wiki/Activation_function); this implementation
// deliberately yields 1 there, matching the original convention.
func dBinaryStep(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		if x == 0 {
			return 1
		}
		return 0
	})
	return
}
// dSigmoid is the derivative of the logistic sigmoid.
// With s(x) = 1/(1+e^-x), the derivative is s(x) * (1 - s(x)).
func dSigmoid(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		sig := 1 / (1 + math.Exp(-x))
		return sig * (1 - sig)
	})
	return
}
// dTanH is the derivative of the hyperbolic tangent: f'(x) = 1 - tanh(x)^2.
//
// It uses math.Tanh instead of spelling out (e^x - e^-x)/(e^x + e^-x):
// the explicit formula overflows to Inf/Inf = NaN once |x| exceeds roughly
// 710 (math.Exp returns +Inf), whereas math.Tanh saturates to +/-1, so the
// derivative correctly approaches 0 for large |x|.
func dTanH(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		fx := math.Tanh(x)
		return 1 - fx*fx
	})
	return
}
// dLReLU is the derivative of the leaky ReLU:
// the fixed leak slope 0.01 on the negative side, 1 everywhere else.
func dLReLU(in matrix.Matrix) (m matrix.Matrix, err error) {
	const leak = 0.01
	m, err = apply(in, func(x float64) float64 {
		if x >= 0 {
			return 1
		}
		return leak
	})
	return
}
// dRReLU is the derivative of the randomized leaky ReLU: a random slope in
// [0, 1) on the negative side, 1 elsewhere.
//
// NOTE(review): a fresh alpha is drawn from the global rand source for every
// element on every call, rather than reusing the alpha sampled in the forward
// pass — confirm this is intended. The draw happens unconditionally (even for
// non-negative x), matching the original's consumption of the RNG stream.
func dRReLU(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		slope := rand.Float64()
		if x >= 0 {
			slope = 1
		}
		return slope
	})
	return
}
// dArcTan is the derivative of the arctangent activation: f'(x) = 1/(x^2 + 1).
func dArcTan(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		denom := x*x + 1
		return 1 / denom
	})
	return
}
// TODO: implement
// dSoftmax applies x*(1-x) element-wise as a stand-in softmax derivative.
//
// NOTE(review): this is only the diagonal of the softmax Jacobian
// (ds_i/dz_j = s_i*(delta_ij - s_j)), and the diagonal form is correct only
// if `in` already holds softmax *outputs*, not raw pre-activations — confirm
// against the caller. The full Jacobian couples every element of a row and
// cannot be expressed through the element-wise `apply` helper, hence the TODO.
func dSoftmax(in matrix.Matrix) (m matrix.Matrix, err error) {
	m, err = apply(in, func(x float64) float64 {
		return x * (1 - x)
	})
	return
}