Skip to content

Commit

Permalink
split
Browse files Browse the repository at this point in the history
  • Loading branch information
kelindar committed Oct 5, 2020
1 parent a25b44c commit 0d513ed
Show file tree
Hide file tree
Showing 5 changed files with 206 additions and 22 deletions.
29 changes: 29 additions & 0 deletions neural/graph_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,3 +31,32 @@ func TestConnected(t *testing.T) {
assert.True(t, n0.connected(n1))
assert.True(t, n1.connected(n0))
}

// TestSplit verifies that splitting the same input→output synapse ten
// times grows a hidden layer of ten neurons. Conns holds a neuron's
// incoming connections (sort() groups synapses by their To neuron), so
// the input keeps none, the output gains one per split, and each hidden
// neuron receives exactly one.
func TestSplit(t *testing.T) {
	serial = 0
	nn := New(1, 1)().(*Network)
	in := &nn.nodes[1]
	out := &nn.nodes[2]

	// Create a hidden layer by repeatedly splitting the same connection.
	for i := 0; i < 10; i++ {
		nn.split(&synapse{
			From:   in,
			To:     out,
			Weight: 0.5,
		})
	}

	// Input neuron has no incoming connections; output has one per split.
	assert.Equal(t, 0, len(nn.nodes[1].Conns))
	assert.Equal(t, 10, len(nn.nodes[2].Conns))

	// Every hidden neuron (serials 3..12) has exactly one incoming connection.
	for i := 3; i < 13; i++ {
		assert.Equal(t, 1, len(nn.nodes[i].Conns))
	}
}
49 changes: 42 additions & 7 deletions neural/network.go
Original file line number Diff line number Diff line change
Expand Up @@ -92,14 +92,26 @@ func (n *Network) sort() {
// Sort by neuron ID
sort.Sort(n.conns)

// Prepare the first connection
conn := &n.conns[0]
conn.From = n.nodes.Find(conn.From.Serial)
conn.To = n.nodes.Find(conn.To.Serial)

// Assign connection slices to neurons. This is basically sub-slicing the main
// array, so the "Data" pointer of the slice will point to the same underlying
// array, avoiding extra memory space and allocations.
prev, lo := n.conns[0].To, 0
curr, hi := n.conns[0].To, 0
for i, conn := range n.conns {
prev, lo := conn.To, 0
curr, hi := conn.To, 0
for i := 0; i < len(n.conns); i++ {
conn = &n.conns[i]

// Re-assign pointers, to make sure that the pointers to neurons are correct
// which may be caused by nodes slice being re-allocated elsewhere during
// append()
conn.From = n.nodes.Find(conn.From.Serial)
conn.To = n.nodes.Find(conn.To.Serial)
curr, hi = conn.To, i
if prev != curr {
if prev.Serial != curr.Serial {
prev.Conns = n.conns[lo:hi]
prev, lo = curr, hi
}
Expand All @@ -110,16 +122,39 @@ func (n *Network) sort() {
}

// connect connects two neurons together.
func (n *Network) connect(from, to *neuron, weight float64) {
func (n *Network) connect(from, to uint32, weight float64) {
n0 := n.nodes.Find(from)
n1 := n.nodes.Find(to)
if n0.connected(n1) {
return
}

defer n.sort() // Keep sorted
n.conns = append(n.conns, synapse{
From: from, // Left neuron
To: to, // Right neuron
From: n0, // Left neuron
To: n1, // Right neuron
Weight: weight, // Weight for the connection
Active: true, // Default to active
})
}

// split splits the connection by adding a hidden neuron in the middle:
// the original synapse is deactivated and replaced by two new ones,
// from→middle with weight 1.0 and middle→to carrying the old weight
// (mirrors NEAT's add-node mutation — TODO confirm intent).
func (n *Network) split(conn *synapse) {
	defer n.sort() // Keep sorted

	// Deactivate the connection and add a neuron
	conn.Active = false
	n.nodes = append(n.nodes, neuron{
		Serial: next(),
		Kind:   isHidden,
	})

	// Create 2 new connections. Only serials are read from conn's
	// From/To pointers — the append above may have re-allocated the
	// nodes slice, but sort() re-resolves neuron pointers by serial
	// after each connect(), so stale pointers are never kept.
	middle := n.nodes.Last()
	n.connect(conn.From.Serial, middle.Serial, 1.0)
	n.connect(middle.Serial, conn.To.Serial, conn.Weight)
}

// Mutate mutates the network.
func (n *Network) Mutate() {
defer n.sort() // Keep sorted
Expand Down
50 changes: 42 additions & 8 deletions neural/network_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ func TestClone(t *testing.T) {
assert.True(t, sort.IsSorted(nn1.nodes))

// Add a connection and clone
nn1.connect(&nn1.nodes[1], &nn1.nodes[3], 0.5)
nn1.connect(nn1.nodes[1].Serial, nn1.nodes[3].Serial, 0.5)
nn1.Clone(nn2)

// Clone must match
Expand All @@ -40,15 +40,49 @@ func TestClone(t *testing.T) {

// make2x2 creates a tiny 2-input, 2-output test network with three
// connections. serial is reset so neuron serial numbers are
// deterministic across tests.
func make2x2() *Network {
	serial = 0
	nn := New(2, 2)().(*Network)
	i0 := nn.nodes[1]
	i1 := nn.nodes[2]
	o0 := nn.nodes[3]
	o1 := nn.nodes[4]

	// Connect inputs to outputs by serial number (o1 gets a single,
	// heavier connection).
	nn.connect(i0.Serial, o0.Serial, .5)
	nn.connect(i1.Serial, o0.Serial, .5)
	nn.connect(i0.Serial, o1.Serial, .75)
	return nn
}

// makeNN creates a 10-input, 1-output test network with `hidden` hidden
// neurons, then fully connects the input layer to the hidden layer.
// serial is reset so neuron serial numbers are deterministic: inputs
// occupy indices 1..10, the output index 11, hidden neurons 12 onward.
func makeNN(hidden int) *Network {
	serial = 0
	nn := New(10, 1)().(*Network)
	in0 := &nn.nodes[1]
	out := &nn.nodes[11]

	// Create the hidden layer by repeatedly splitting the same
	// input→output connection; each split adds one hidden neuron.
	for i := 0; i < hidden; i++ {
		nn.split(&synapse{
			From:   in0,
			To:     out,
			Weight: 0.5,
		})
	}

	// Fully connect the first 11 neurons (indices 0..10; index 0 is
	// presumably a bias neuron — TODO confirm) to the hidden layer.
	for i := 0; i < 11; i++ { // Input layer
		for j := 12; j < 12+hidden; j++ { // Hidden layer
			nn.connect(nn.nodes[i].Serial, nn.nodes[j].Serial, 0.75)
		}
	}

	return nn
}
33 changes: 30 additions & 3 deletions neural/predict.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@

package neural

import (
"math"
)

// Predict activates the network
func (n *Network) Predict(input, output []float64) []float64 {
if output == nil {
Expand Down Expand Up @@ -60,9 +64,7 @@ func (n *neuron) Value() float64 {

// Thanks https://codingforspeed.com/using-faster-exponential-approximation/
func exp(x float64) float64 {
x = 1.0 + x/1024.0
x *= x
x *= x
x = 1.0 + x/256.0
x *= x
x *= x
x *= x
Expand All @@ -79,3 +81,28 @@ func exp(x float64) float64 {
// swish is the swish activation, x·σ(x), computed with the fast
// exponential approximation in this file.
func swish(x float64) float64 {
	denominator := 1.0 + exp(-x)
	return x / denominator
}

// swish2 is the precise swish activation, x·σ(x), computed with
// math.Exp; it serves as the accuracy baseline for swish.
func swish2(x float64) float64 {
	denominator := 1.0 + math.Exp(-x)
	return x / denominator
}

// relu is a branch-free rectified linear unit: it returns x for
// non-negative inputs and +0.0 for negative ones by masking the
// float's bits.
//
// An arithmetic right shift of the sign bit builds a mask that is
// all-ones for negative inputs and all-zeros otherwise:
//
//	x >= 0:  mask = 0x0000…0  →  v &^ mask == v   (x unchanged)
//	x <  0:  mask = 0xFFFF…F  →  v &^ mask == 0   (+0.0)
func relu(x float64) float64 {
	v := math.Float64bits(x)
	// BUG FIX: `v >> 63` on a uint64 is a logical shift yielding 0 or 1,
	// so `v &^ (v >> 63)` only cleared bit 0 and returned negative
	// inputs unchanged. The int64 cast makes the shift arithmetic, so
	// the sign bit fills the entire mask as the diagram intends.
	mask := uint64(int64(v) >> 63)
	return math.Float64frombits(v &^ mask)
}

// relu2 is the reference rectified linear unit, max(x, 0), used as the
// precision baseline for the bit-twiddled relu.
func relu2(x float64) float64 {
	const floor = 0.0
	return math.Max(floor, x)
}
67 changes: 63 additions & 4 deletions neural/predict_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,23 @@ import (
"github.com/stretchr/testify/assert"
)

// BenchmarkPredict/10-8 2500016 474 ns/op 0 B/op 0 allocs/op
// BenchmarkPredict/100-8 272751 4447 ns/op 0 B/op 0 allocs/op
func BenchmarkPredict(b *testing.B) {
b.Run("2x2", func(b *testing.B) {
nn := make2x2()
in := []float64{1, 0}
out := []float64{0, 0}
in := []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
out := []float64{0}

b.Run("10", func(b *testing.B) {
nn := makeNN(10)
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
nn.Predict(in, out)
}
})

b.Run("100", func(b *testing.B) {
nn := makeNN(100)
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
Expand All @@ -39,3 +50,51 @@ func TestPredict(t *testing.T) {
r := nn.Predict([]float64{0.5, 1}, nil)
assert.True(t, r[0] > 0.5)
}

// BenchmarkFunc measures the approximate activation helpers (relu,
// swish) against their precise math-package counterparts (relu2,
// swish2). Sub-benchmarks call each function directly so the indirect
// call overhead of a func value does not distort the comparison.
func BenchmarkFunc(b *testing.B) {
	// out is a sink so the compiler cannot dead-code-eliminate the calls.
	var out float64

	b.Run("relu", func(b *testing.B) {
		b.ResetTimer()
		b.ReportAllocs()
		for n := 0; n < b.N; n++ {
			out = relu(float64(n))
		}
	})

	b.Run("relu-precise", func(b *testing.B) {
		b.ResetTimer()
		b.ReportAllocs()
		for n := 0; n < b.N; n++ {
			out = relu2(float64(n))
		}
	})

	b.Run("swish", func(b *testing.B) {
		b.ResetTimer()
		b.ReportAllocs()
		for n := 0; n < b.N; n++ {
			out = swish(float64(n))
		}
	})

	b.Run("swish-precise", func(b *testing.B) {
		b.ResetTimer()
		b.ReportAllocs()
		for n := 0; n < b.N; n++ {
			out = swish2(float64(n))
		}
	})

	// Consume the sink so `out` is observably used.
	assert.NotZero(b, out)
}

// TestRelu checks the branch-free relu against the math.Max reference
// relu2 across a sweep of inputs. The sweep deliberately starts at -1:
// the original [0, 1] range never exercised negative inputs, which is
// exactly where the sign-masking bit trick can go wrong.
func TestRelu(t *testing.T) {
	var out1, out2 []float64
	for i := float64(-1); i <= 1.0; i += 0.01 {
		out1 = append(out1, relu(i))
		out2 = append(out2, relu2(i))
	}

	assert.Equal(t, out1, out2)
}

0 comments on commit 0d513ed

Please sign in to comment.