ermahagerdmonards.go

package dual

import (
	"fmt"

	"github.com/pkg/errors"
	G "gorgonia.org/gorgonia"
	"gorgonia.org/gorgonia/ops/nn"
	"gorgonia.org/tensor"
)

// maebe is a small Maybe/Either-style helper: it remembers the first error
// encountered while building the graph, and every subsequent step becomes a
// no-op once an error has been recorded.
type maebe struct {
	err error
}

// batchNormOp is the subset of the batch normalization op that callers use to
// toggle between training and inference behaviour, and to reset its state.
type batchNormOp interface {
	SetTraining()
	SetTesting()
	Reset() error
}

// do is the generic step of the monad (and may be useful elsewhere): it runs f
// only when no earlier step has failed, and wraps any error with a stack trace.
func (m *maebe) do(f func() (*G.Node, error)) (retVal *G.Node) {
	if m.err != nil {
		return nil
	}
	if retVal, m.err = f(); m.err != nil {
		m.err = errors.WithStack(m.err)
	}
	return
}

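// exampleTower is an illustrative sketch, not part of the original file: it shows
// how the maebe pattern is meant to be used. Per-call error checks are elided
// while the graph is assembled, and m.err is inspected once at the end. The
// filter count and layer name below are arbitrary placeholders.
func exampleTower(m *maebe, input *G.Node) (*G.Node, batchNormOp, error) {
	convolved := m.conv(input, 64, 3, "Example")
	normalized, op := m.batchnorm(convolved)
	activated := m.rectify(normalized)
	if m.err != nil {
		return nil, nil, m.err
	}
	return activated, op, nil
}
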
// conv adds a stride-1 2D convolution with a square kernel of the given size.
// The input is assumed to be in NCHW layout; the feature count is read from the
// channel dimension and the padding is computed by findPadding.
func (m *maebe) conv(input *G.Node, filterCount, size int, name string) (retVal *G.Node) {
	if m.err != nil {
		return nil
	}
	featureCount := input.Shape()[1]
	padding := findPadding(input.Shape()[2], input.Shape()[3], size, size)
	filter := G.NewTensor(input.Graph(), Float, 4, G.WithShape(filterCount, featureCount, size, size), G.WithName("Filter"+name), G.WithInit(G.GlorotU(1.0)))
	// assume well behaved images
	if retVal, m.err = nnops.Conv2d(input, filter, []int{size, size}, padding, []int{1, 1}, []int{1, 1}); m.err != nil {
		m.err = errors.WithStack(m.err)
	}
	return
}

// batchnorm adds a batch normalization op and returns both the output node and
// the op itself, so the caller can later switch it between training and testing.
func (m *maebe) batchnorm(input *G.Node) (retVal *G.Node, retOp batchNormOp) {
	if m.err != nil {
		return nil, nil
	}
	// note: the scale and biases will still be created,
	// and they will still be backpropagated
	if retVal, _, _, retOp, m.err = nnops.BatchNorm(input, nil, nil, 0.997, 1e-5); m.err != nil {
		m.err = errors.WithStack(m.err)
	}
	return
}

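// setPhase is an illustrative sketch, not part of the original file: the
// batchNormOps returned by batchnorm are what a caller would collect while
// building the network and then flip between the training and inference phases.
func setPhase(ops []batchNormOp, training bool) {
	for _, op := range ops {
		if training {
			op.SetTraining()
		} else {
			op.SetTesting()
		}
	}
}
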
// res builds a conv → batchnorm → ReLU block with a 3x3 kernel and returns the
// output node together with the batch norm op.
func (m *maebe) res(input *G.Node, filterCount int, name string) (*G.Node, batchNormOp) {
	convolved := m.conv(input, filterCount, 3, name)
	normalized, op := m.batchnorm(convolved)
	retVal := m.rectify(normalized)
	return retVal, op
}

// share builds a shared layer from two res blocks applied to the same input;
// their outputs are summed and rectified, and both batch norm ops are returned.
func (m *maebe) share(input *G.Node, filterCount, layer int) (*G.Node, batchNormOp, batchNormOp) {
	layer1, l1Op := m.res(input, filterCount, fmt.Sprintf("Layer1 of Shared Layer %d", layer))
	layer2, l2Op := m.res(input, filterCount, fmt.Sprintf("Layer2 of Shared Layer %d", layer))
	added := m.do(func() (*G.Node, error) { return G.Add(layer1, layer2) })
	retVal := m.rectify(added)
	return retVal, l1Op, l2Op
}

// linear adds a fully connected layer: the input is multiplied by a weight
// matrix, and a zero-initialised bias shaped like the product is added.
func (m *maebe) linear(input *G.Node, units int, name string) *G.Node {
	if m.err != nil {
		return nil
	}
	// the weight's first dimension is inferred from the input's feature dimension
	w := G.NewTensor(input.Graph(), Float, 2, G.WithShape(input.Shape()[1], units), G.WithInit(G.GlorotN(1.0)), G.WithName(name+"_w"))
	xw := m.do(func() (*G.Node, error) { return G.Mul(input, w) })
	b := G.NewTensor(xw.Graph(), Float, xw.Shape().Dims(), G.WithShape(xw.Shape().Clone()...), G.WithName(name+"_b"), G.WithInit(G.Zeroes()))
	return m.do(func() (*G.Node, error) { return G.Add(xw, b) })
}

// rectify applies a ReLU activation.
func (m *maebe) rectify(input *G.Node) (retVal *G.Node) {
	if m.err != nil {
		return nil
	}
	if retVal, m.err = nnops.Rectify(input); m.err != nil {
		m.err = errors.WithStack(m.err)
	}
	return
}

// reshape reshapes the input node to the given shape.
func (m *maebe) reshape(input *G.Node, to tensor.Shape) (retVal *G.Node) {
	if m.err != nil {
		return nil
	}
	if retVal, m.err = G.Reshape(input, to); m.err != nil {
		m.err = errors.WithStack(m.err)
	}
	return
}

// xent computes the loss mean(-(target*output + (1-target)*(1-output))),
// where * denotes element-wise multiplication.
func (m *maebe) xent(output, target *G.Node) (retVal *G.Node) {
	// like the other maebe methods, bail out early if an earlier step failed
	if m.err != nil {
		return nil
	}
	var one *G.Node
	switch Float {
	case G.Float32:
		one = G.NewConstant(float32(1))
	case G.Float64:
		one = G.NewConstant(float64(1))
	}
	var omy, omout *G.Node
	if omy, m.err = G.Sub(one, target); m.err != nil {
		m.err = errors.WithStack(m.err)
		return nil
	}
	if omout, m.err = G.Sub(one, output); m.err != nil {
		m.err = errors.WithStack(m.err)
		return nil
	}
	var fst, snd *G.Node
	if fst, m.err = G.HadamardProd(target, output); m.err != nil {
		m.err = errors.WithStack(m.err)
		return nil
	}
	if snd, m.err = G.HadamardProd(omy, omout); m.err != nil {
		m.err = errors.WithStack(m.err)
		return nil
	}
	if retVal, m.err = G.Add(fst, snd); m.err != nil {
		m.err = errors.WithStack(m.err)
		return nil
	}
	if retVal, m.err = G.Neg(retVal); m.err != nil {
		m.err = errors.WithStack(m.err)
		return nil
	}
	if retVal, m.err = G.Mean(retVal); m.err != nil {
		m.err = errors.WithStack(m.err)
	}
	return
}

// findPadding returns the padding for a convolution. The input terms cancel
// out, so it reduces to (kernel-1)/2 in each dimension, i.e. "same"-style
// padding at stride 1 for odd kernel sizes.
func findPadding(inputX, inputY, kernelX, kernelY int) []int {
	return []int{
		(inputX - 1 - inputX + kernelX) / 2,
		(inputY - 1 - inputY + kernelY) / 2,
	}
}

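// examplePadding is an illustrative sketch, not part of the original file: with a
// 3x3 kernel, findPadding returns {1, 1} regardless of the input size (e.g. a
// 19x19 board), which preserves the spatial dimensions at stride 1.
func examplePadding() []int {
	return findPadding(19, 19, 3, 3) // == []int{1, 1}
}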