activation.go
package nn

import (
	"math"
)

// MakeActivation takes an element-wise function f and its derivative g and
// returns an activation function that wires the result into the autograd graph.
func MakeActivation(op string, f, g func(float64) float64) func(*Value) *Value {
	return func(value *Value) *Value {
		data := f(value.data)
		ans := &Value{
			data:     data,
			op:       op,
			children: []*Value{value},
		}
		ans.backward = func() {
			// Chain rule: dL/dx = g(x) * dL/dy, accumulated into the input's gradient.
			value.grad += g(value.data) * ans.grad
		}
		return ans
	}
}
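
// LeakyRelu is a hypothetical example added for illustration (it is not part
// of the original file): any element-wise activation with a known derivative
// can be built through MakeActivation. The 0.01 slope for negative inputs is
// an assumption made for this sketch.
func LeakyRelu(value *Value) *Value {
	f := func(x float64) float64 {
		if x > 0.0 {
			return x
		}
		return 0.01 * x
	}
	g := func(x float64) float64 {
		if x > 0.0 {
			return 1.0
		}
		return 0.01
	}
	return MakeActivation("LeakyReLU", f, g)(value)
}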
// Relu is the rectified linear unit: y = max(0, x).
func Relu(value *Value) *Value {
	f := func(x float64) float64 {
		if x > 0.0 {
			return x
		}
		return 0.0
	}
	g := func(x float64) float64 {
		if x > 0.0 {
			return 1.0
		}
		return 0.0
	}
	return MakeActivation("ReLU", f, g)(value)
}
// Sigmoid is the logistic sigmoid: y = 1/(1 + exp(-x)).
func Sigmoid(value *Value) *Value {
	f := func(x float64) float64 {
		return 1.0 / (1.0 + math.Exp(-x))
	}
	g := func(x float64) float64 {
		// The derivative is conveniently expressed through the output: y*(1-y).
		y := f(x)
		return y * (1 - y)
	}
	return MakeActivation("Sigmoid", f, g)(value)
}
// Tanh is the hyperbolic tangent: y = (exp(2x) - 1) / (exp(2x) + 1).
func Tanh(value *Value) *Value {
	f := func(x float64) float64 {
		y := math.Exp(2 * x)
		return (y - 1.0) / (y + 1.0)
	}
	g := func(x float64) float64 {
		y := f(x)
		return 1.0 - y*y
	}
	return MakeActivation("Tanh", f, g)(value)
}
// Softmax applies the exponential y = exp(x) element-wise.
// The outputs still need to be normalized; that is handled by the layer
// that applies the softmax.
func Softmax(value *Value) *Value {
	f := func(x float64) float64 {
		return math.Exp(x)
	}
	g := func(x float64) float64 {
		// d/dx exp(x) = exp(x), so the derivative reuses f.
		return f(x)
	}
	return MakeActivation("Exp", f, g)(value)
}