forked from pa-m/sklearn
/
classification.go
99 lines (88 loc) · 2.38 KB
/
classification.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
package metrics
import "gonum.org/v1/gonum/mat"
// "fmt"
// nn "../neural_network"
// LogLoss ...
// func LogLoss(Ytrue, Ypred mat.Matrix) float64 {
// return nn.LogLoss{}.Func(Ytrue, Ypred)
// }
// AccuracyScore returns the (optionally weighted) fraction of samples whose
// predicted row matches the true row exactly. Each prediction is binarized
// at a 0.5 threshold before comparison. When normalize is false the weighted
// count of correct samples is returned instead of the fraction.
// sampleWeight may be nil, in which case every sample has weight 1.
func AccuracyScore(Ytrue, Ypred mat.Matrix, normalize bool, sampleWeight *mat.Dense) float64 {
	nSamples, nOutputs := Ytrue.Dims()
	var correct, total float64
	for row := 0; row < nSamples; row++ {
		weight := 1.
		if sampleWeight != nil {
			weight = sampleWeight.At(row, 0)
		}
		match := true
		for col := 0; col < nOutputs; col++ {
			// Binarize the prediction at 0.5, then require exact equality
			// with the true label for this output.
			var rounded float64
			if Ypred.At(row, col) >= .5 {
				rounded = 1.
			}
			if Ytrue.At(row, col) != rounded {
				match = false
				break
			}
		}
		if match {
			correct += weight
		}
		total += weight
	}
	if !normalize {
		return correct
	}
	return correct / total
}
// countTPFPTNFN tallies true positives, false positives, true negatives and
// false negatives over every cell of Ytrue/Ypred, binarizing both matrices
// at the given pivot threshold (a value >= pivot counts as positive).
func countTPFPTNFN(Ytrue, Ypred mat.Matrix, pivot float64) (TP, FP, TN, FN float64) {
	rows, cols := Ytrue.Dims()
	for r := 0; r < rows; r++ {
		for c := 0; c < cols; c++ {
			predPositive := Ypred.At(r, c) >= pivot
			truePositive := Ytrue.At(r, c) >= pivot
			switch {
			case predPositive && truePositive:
				TP++
			case predPositive:
				FP++
			case truePositive:
				FN++
			default:
				TN++
			}
		}
	}
	return TP, FP, TN, FN
}
// PrecisionScore returns TP / (TP + FP) with predictions and truths
// binarized at a 0.5 threshold. See https://en.wikipedia.org/wiki/F1_score.
// When there are no predicted positives (TP+FP == 0) precision is undefined;
// following scikit-learn's convention this returns 0 rather than NaN.
func PrecisionScore(Ytrue, Ypred mat.Matrix) float64 {
	pivot := 0.5
	TP, FP, _, _ := countTPFPTNFN(Ytrue, Ypred, pivot)
	// Guard the 0/0 case so callers never see NaN.
	if TP+FP == 0 {
		return 0
	}
	return TP / (TP + FP)
}
// RecallScore returns TP / (TP + FN) with predictions and truths
// binarized at a 0.5 threshold. See https://en.wikipedia.org/wiki/F1_score.
// When there are no actual positives (TP+FN == 0) recall is undefined;
// following scikit-learn's convention this returns 0 rather than NaN.
func RecallScore(Ytrue, Ypred mat.Matrix) float64 {
	pivot := .5
	TP, _, _, FN := countTPFPTNFN(Ytrue, Ypred, pivot)
	// Guard the 0/0 case so callers never see NaN.
	if TP+FN == 0 {
		return 0
	}
	return TP / (TP + FN)
}
// F1Score returns the harmonic mean of precision and recall, i.e. the
// F-beta score with beta = 1. See https://en.wikipedia.org/wiki/F1_score.
func F1Score(Ytrue, Ypred mat.Matrix) float64 {
	return FBetaScore(Ytrue, Ypred, 1.)
}
// FBetaScore is the weighted harmonic mean of precision and recall,
// reaching its optimal value at 1 and its worst value at 0.
// The `beta` parameter determines the weight of precision in the combined
// score. ``beta < 1`` lends more weight to precision, while ``beta > 1``
// favors recall (``beta -> 0`` considers only precision, ``beta -> inf``
// only recall).
// When the denominator is 0 (no true positives, no false positives and no
// false negatives) the score is undefined; following scikit-learn's
// convention this returns 0 rather than NaN.
func FBetaScore(Ytrue, Ypred mat.Matrix, beta float64) float64 {
	Beta2 := beta * beta
	pivot := 0.5
	TP, FP, _, FN := countTPFPTNFN(Ytrue, Ypred, pivot)
	den := (1+Beta2)*TP + Beta2*FN + FP
	// Guard the 0/0 case so callers never see NaN.
	if den == 0 {
		return 0
	}
	return (1 + Beta2) * TP / den
}