/
loss.go
89 lines (71 loc) · 2.06 KB
/
loss.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
package nngo
import (
"fmt"
"math"
"gonum.org/v1/gonum/mat"
)
// Each constant identifies a loss function together with its derivative.
// Pass one of these to getLossTuple when defining a neural network.
const (
	LossMse = 0
	LossMae = 1
)
// lossTuple pairs a loss function with its derivative so both can be
// looked up together from a single specification constant.
type lossTuple struct {
	loss           lossFunc
	lossDerivative lossFuncDerivative
}
// getLossTuple returns the loss function / derivative pair selected by
// lossSpec (one of the Loss* constants). It returns a non-nil error for
// any value that is not a known specification.
//
// Note: the previous range check (< 0 || > 2) accepted the value 2 and
// silently mapped it to MAE; a switch on the known constants fixes that.
func getLossTuple(lossSpec int) (lossTuple, error) {
	switch lossSpec {
	case LossMse:
		return lossTuple{Mse, MseDerivative}, nil
	case LossMae:
		return lossTuple{Mae, MaeDerivative}, nil
	default:
		return lossTuple{}, fmt.Errorf("wrong specification")
	}
}
// lossFunc computes a scalar loss from the true and predicted vectors.
type lossFunc func(yTrue, yPred mat.VecDense) (float64, error)

// lossFuncDerivative computes the gradient of a loss with respect to yPred.
type lossFuncDerivative func(yTrue, yPred mat.VecDense) (mat.VecDense, error)
// Mse returns the mean squared error between yTrue and yPred.
// It returns an error when the two vectors differ in length.
func Mse(yTrue, yPred mat.VecDense) (float64, error) {
	n := yTrue.Len()
	if n != yPred.Len() {
		return 0.0, fmt.Errorf("vectors need to have the same dimensions")
	}
	var total float64
	for i := 0; i < n; i++ {
		total += math.Pow(yTrue.AtVec(i)-yPred.AtVec(i), 2)
	}
	return total / float64(n), nil
}
// MseDerivative returns the gradient of the mean squared error with
// respect to yPred, i.e. 2*(yPred - yTrue)/n element-wise.
// It returns an error when the two vectors differ in length.
func MseDerivative(yTrue, yPred mat.VecDense) (mat.VecDense, error) {
	var grad mat.VecDense
	if yTrue.Len() != yPred.Len() {
		return grad, fmt.Errorf("vectors need to have the same dimensions")
	}
	grad.SubVec(&yPred, &yTrue)
	// Scale in two steps exactly as before: by 2, then by 1/n.
	grad.ScaleVec(2, &grad)
	grad.ScaleVec(1/float64(yTrue.Len()), &grad)
	return grad, nil
}
// Mae returns the mean absolute error between yTrue and yPred.
// It returns an error when the two vectors differ in length.
func Mae(yTrue, yPred mat.VecDense) (float64, error) {
	n := yTrue.Len()
	if n != yPred.Len() {
		return 0.0, fmt.Errorf("vectors need to have the same dimensions")
	}
	var total float64
	for i := 0; i < n; i++ {
		total += math.Abs(yPred.AtVec(i) - yTrue.AtVec(i))
	}
	return total / float64(n), nil
}
// MaeDerivative returns the subgradient of the mean absolute error with
// respect to yPred: sign(yPred - yTrue)/n element-wise, with 0 where the
// prediction equals the target (a valid subgradient at the kink).
// It returns an error when the two vectors differ in length.
//
// This replaces the previous stub that returned yTrue unchanged.
func MaeDerivative(yTrue, yPred mat.VecDense) (mat.VecDense, error) {
	var grad mat.VecDense
	if yTrue.Len() != yPred.Len() {
		return grad, fmt.Errorf("vectors need to have the same dimensions")
	}
	n := yTrue.Len()
	grad = *mat.NewVecDense(n, nil)
	for i := 0; i < n; i++ {
		diff := yPred.AtVec(i) - yTrue.AtVec(i)
		switch {
		case diff > 0:
			grad.SetVec(i, 1/float64(n))
		case diff < 0:
			grad.SetVec(i, -1/float64(n))
			// diff == 0: leave the zero value (subgradient 0).
		}
	}
	return grad, nil
}