linear_regression.go (forked from sjwhitworth/golearn)
package linear_models

import (
	"errors"
	"fmt"

	"github.com/ep2012/golearn/base"
	_ "github.com/gonum/blas"
	"gonum.org/v1/gonum/mat"
)

var (
	NotEnoughDataError  = errors.New("not enough rows to support this many variables")
	NoTrainingDataError = errors.New("you need to Fit() before you can Predict()")
)
// LinearRegression fits an ordinary least squares model to the
// FloatAttributes of a dataset and predicts its single class Attribute.
type LinearRegression struct {
	fitted                 bool
	disturbance            float64
	regressionCoefficients []float64
	attrs                  []base.Attribute
	cls                    base.Attribute
}

// NewLinearRegression returns an unfitted LinearRegression.
func NewLinearRegression() *LinearRegression {
	return &LinearRegression{fitted: false}
}
// Fit estimates the intercept and regression coefficients from inst
// using a QR decomposition of the design matrix.
func (lr *LinearRegression) Fit(inst base.FixedDataGrid) error {
	// Retrieve row count
	_, rows := inst.Size()

	// Validate class Attribute count
	classAttrs := inst.AllClassAttributes()
	if len(classAttrs) != 1 {
		return fmt.Errorf("only 1 class variable is permitted")
	}
	classAttrSpecs := base.ResolveAttributes(inst, classAttrs)

	// Retrieve the relevant (float-valued) Attributes
	allAttrs := base.NonClassAttributes(inst)
	attrs := make([]base.Attribute, 0)
	for _, a := range allAttrs {
		if _, ok := a.(*base.FloatAttribute); ok {
			attrs = append(attrs, a)
		}
	}

	cols := len(attrs) + 1
	if rows < cols {
		return NotEnoughDataError
	}

	// Retrieve the relevant Attribute specifications
	attrSpecs := base.ResolveAttributes(inst, attrs)

	// Split into two matrices: the observed results (dependent variable y)
	// and the explanatory variables (X) - see http://en.wikipedia.org/wiki/Linear_regression
	observed := mat.NewDense(rows, 1, nil)
	explVariables := mat.NewDense(rows, cols, nil)

	// Build the observed matrix
	inst.MapOverRows(classAttrSpecs, func(row [][]byte, i int) (bool, error) {
		val := base.UnpackBytesToFloat(row[0])
		observed.Set(i, 0, val)
		return true, nil
	})

	// Build the explanatory variables
	inst.MapOverRows(attrSpecs, func(row [][]byte, i int) (bool, error) {
		// The first column is the intercept term, fixed at 1.0
		explVariables.Set(i, 0, 1.0)
		for j, r := range row {
			explVariables.Set(i, j+1, base.UnpackBytesToFloat(r))
		}
		return true, nil
	})

	// Solve the least-squares problem X*b = y via QR: factorise X = QR,
	// then back-substitute the upper-triangular system R*b = Qᵀ*y.
	n := cols
	qr := new(mat.QR)
	qr.Factorize(explVariables)

	var q, reg mat.Dense
	qr.QTo(&q)
	qr.RTo(&reg)

	var transposed, qty mat.Dense
	transposed.CloneFrom(q.T())
	qty.Mul(&transposed, observed)

	// Back-substitution, working upwards from the last row of R
	regressionCoefficients := make([]float64, n)
	for i := n - 1; i >= 0; i-- {
		regressionCoefficients[i] = qty.At(i, 0)
		for j := i + 1; j < n; j++ {
			regressionCoefficients[i] -= regressionCoefficients[j] * reg.At(i, j)
		}
		regressionCoefficients[i] /= reg.At(i, i)
	}

	lr.disturbance = regressionCoefficients[0]
	lr.regressionCoefficients = regressionCoefficients[1:]
	lr.fitted = true
	lr.attrs = attrs
	lr.cls = classAttrs[0]
	return nil
}
// Predict returns a prediction vector for X, computed from the fitted
// intercept (disturbance) and coefficients. Fit must be called first.
func (lr *LinearRegression) Predict(X base.FixedDataGrid) (base.FixedDataGrid, error) {
	if !lr.fitted {
		return nil, NoTrainingDataError
	}

	ret := base.GeneratePredictionVector(X)
	attrSpecs := base.ResolveAttributes(X, lr.attrs)
	clsSpec, err := ret.GetAttribute(lr.cls)
	if err != nil {
		return nil, err
	}

	X.MapOverRows(attrSpecs, func(row [][]byte, i int) (bool, error) {
		prediction := lr.disturbance
		for j, r := range row {
			prediction += base.UnpackBytesToFloat(r) * lr.regressionCoefficients[j]
		}
		ret.Set(clsSpec, i, base.PackFloatToBytes(prediction))
		return true, nil
	})

	return ret, nil
}
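
A minimal usage sketch, not part of the original file: assuming the fork keeps upstream golearn's base.ParseCSVToInstances and base.InstancesTrainTestSplit helpers, a driver program might fit and predict like this. The file name, CSV path, split fraction and import paths below are illustrative only.

// example_usage.go - hypothetical driver; paths and dataset are assumptions.
package main

import (
	"fmt"

	"github.com/ep2012/golearn/base"
	"github.com/ep2012/golearn/linear_models"
)

func main() {
	// Load a CSV whose last column is a float class Attribute.
	data, err := base.ParseCSVToInstances("datasets/exams.csv", true)
	if err != nil {
		panic(err)
	}

	// Hold out some rows for prediction.
	train, test := base.InstancesTrainTestSplit(data, 0.4)

	lr := linear_models.NewLinearRegression()
	if err := lr.Fit(train); err != nil {
		panic(err)
	}

	predictions, err := lr.Predict(test)
	if err != nil {
		panic(err)
	}
	fmt.Println(predictions)
}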