
Commit

merged Elman RNN with Feed Forward
jonhkr committed Jul 26, 2014
1 parent dacc4c7 commit 8d97eef
Showing 4 changed files with 124 additions and 185 deletions.
61 changes: 59 additions & 2 deletions README.md
@@ -1,4 +1,61 @@
gobrain
=======
# gobrain

Neural Networks written in go


## Getting Started
Version `1.0.0` includes just the basic neural network functions: Feed Forward and Elman Recurrent Neural Networks.
A simple Feed Forward Neural Network can be constructed and trained as follows:

```golang
// set the random seed to 0
rand.Seed(0)

// create the XOR representation pattern to train the network
patterns := [][][]float64{
{{0, 0}, {0}},
{{0, 1}, {1}},
{{1, 0}, {1}},
{{1, 1}, {0}},
}

// instantiate the Feed Forward
ff := &nn.FeedForward{}

// initialize the Neural Network;
// the network's structure will contain:
// 2 inputs, 2 hidden nodes and 1 output.
ff.Init(2, 2, 1)

// train the network using the XOR patterns
// the training will run for 1000 epochs
// the learning rate is set to 0.6 and the momentum factor to 0.4
// use true in the last parameter to receive reports about the learning error
ff.Train(patterns, 1000, 0.6, 0.4, true)
```

After running this code the network will be trained and ready to be used.

The network can be tested using the `Test` method, for instance:

```golang
ff.Test(patterns)
```

The test operation will print something like the following to the console:

```
[0 0] -> [0.057503945708445] : [0]
[0 1] -> [0.930100635071210] : [1]
[1 0] -> [0.927809966227284] : [1]
[1 1] -> [0.097408795324620] : [0]
```

The first values are the inputs, the values after the arrow `->` are the outputs from the network, and the values after `:` are the expected outputs.

## Recurrent Neural Network
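The `FeedForward` network can also act as an Elman Recurrent Neural Network by adding context layers with the `SetContexts` method. A minimal sketch, reusing the XOR patterns from above (passing `nil` initializes every context node to `0.5`):

```golang
// instantiate the network and initialize it as before
rnn := &nn.FeedForward{}
rnn.Init(2, 2, 1)

// add a single context layer; passing nil as the second
// argument fills the context nodes with 0.5
rnn.SetContexts(1, nil)

// training and testing work exactly as for the plain Feed Forward network
rnn.Train(patterns, 1000, 0.6, 0.4, true)
rnn.Test(patterns)
```

Each call to `Update` feeds the current context values into the hidden layer and then shifts the latest hidden activations into the first context, so the output depends on the order in which the patterns are presented.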


## Changelog
* 1.0.0 - Added Feed Forward Neural Network with contexts from Elman RNN

170 changes: 0 additions & 170 deletions nn/elmanrnn.go

This file was deleted.

48 changes: 36 additions & 12 deletions nn/feedforward.go
@@ -13,18 +13,19 @@ type FeedForward struct {
Regression bool
// Activations for nodes
InputActivations, HiddenActivations, OutputActivations []float64
// ElmanRNN contexts
Contexts [][]float64
// Weights
InputWeights, OutputWeights [][]float64
// Last change in weights for momentum
InputChanges, OutputChanges [][]float64
}

// Initialize the neural network
func (nn *FeedForward) Init(inputs, hiddens, outputs int, regression bool) {
func (nn *FeedForward) Init(inputs, hiddens, outputs int) {
nn.NInputs = inputs + 1 // +1 for bias
nn.NHiddens = hiddens + 1 // +1 for bias
nn.NOutputs = outputs
nn.Regression = regression

nn.InputActivations = vector(nn.NInputs, 1.0)
nn.HiddenActivations = vector(nn.NHiddens, 1.0)
@@ -49,6 +50,18 @@ func (nn *FeedForward) Init(inputs, hiddens, outputs int, regression bool) {
nn.OutputChanges = matrix(nn.NHiddens, nn.NOutputs)
}

// SetContexts adds nContexts Elman-style context layers to the network;
// passing nil for initValues fills each context node with 0.5
func (nn *FeedForward) SetContexts(nContexts int, initValues [][]float64) {
if initValues == nil {
initValues = make([][]float64, nContexts)

for i := 0; i < nContexts; i++ {
initValues[i] = vector(nn.NHiddens, 0.5)
}
}

nn.Contexts = initValues
}

func (nn *FeedForward) Update(inputs []float64) []float64 {
if len(inputs) != nn.NInputs-1 {
log.Fatal("Error: wrong number of inputs")
@@ -60,22 +73,36 @@ func (nn *FeedForward) Update(inputs []float64) []float64 {

for i := 0; i < nn.NHiddens-1; i++ {
var sum float64 = 0.0

for j := 0; j < nn.NInputs; j++ {
sum += nn.InputActivations[j] * nn.InputWeights[j][i]
}

// compute contexts sum
for k := 0; k < len(nn.Contexts); k++ {
for j := 0; j < nn.NHiddens-1; j++ {
sum += nn.Contexts[k][j]
}
}

nn.HiddenActivations[i] = sigmoid(sum)
}

// update the contexts
if len(nn.Contexts) > 0 {
for i := len(nn.Contexts) - 1; i > 0; i-- {
nn.Contexts[i] = nn.Contexts[i-1]
}
nn.Contexts[0] = nn.HiddenActivations
}

for i := 0; i < nn.NOutputs; i++ {
var sum float64 = 0.0
for j := 0; j < nn.NHiddens; j++ {
sum += nn.HiddenActivations[j] * nn.OutputWeights[j][i]
}
if nn.Regression {
nn.OutputActivations[i] = sum
} else {
nn.OutputActivations[i] = sigmoid(sum)
}

nn.OutputActivations[i] = sigmoid(sum)
}

return nn.OutputActivations
@@ -88,11 +115,7 @@ func (nn *FeedForward) BackPropagate(targets []float64, lRate, mFactor float64)

outputDeltas := vector(nn.NOutputs, 0.0)
for i := 0; i < nn.NOutputs; i++ {
outputDeltas[i] = targets[i] - nn.OutputActivations[i]

if !nn.Regression {
outputDeltas[i] = dsigmoid(nn.OutputActivations[i]) * outputDeltas[i]
}
outputDeltas[i] = dsigmoid(nn.OutputActivations[i]) * (targets[i] - nn.OutputActivations[i])
}

hiddenDeltas := vector(nn.NHiddens, 0.0)
@@ -102,6 +125,7 @@
for j := 0; j < nn.NOutputs; j++ {
e += outputDeltas[j] * nn.OutputWeights[i][j]
}

hiddenDeltas[i] = dsigmoid(nn.HiddenActivations[i]) * e
}

30 changes: 29 additions & 1 deletion test.go
@@ -19,8 +19,36 @@ func main() {

ff := &nn.FeedForward{}

ff.Init(2, 2, 1, false)
ff.Init(2, 2, 1)
ff.Train(patterns, 1000, 0.6, 0.4, false)
ff.Test(patterns)

// j 0 0 1
// o 0 1 0
// n 0 1 1
// a 1 0 0
// s 1 0 1

// 1 1 2 3 5 8
// patternsrnn := [][][]float64{
// {{0, 0, 1}, {0, 1, 0}},
// {{0, 1, 0}, {0, 1, 1}},
// {{0, 1, 1}, {1, 0, 0}},
// {{1, 0, 0}, {1, 0, 1}},
// {{1, 0, 1}, {0, 0, 0}},
// }

// patternsrnntest := [][][]float64{
// {{0, 1, 0}, {0, 1, 1}}, // o
// {{0, 1, 1}, {1, 0, 0}}, // n
// {{1, 0, 0}, {1, 0, 1}}, // a
// {{1, 0, 1}, {0, 0, 0}}, // s
// }

// rnn := &nn.FeedForward{}

// rnn.Init(3, 2, 3, false)
// rnn.SetContexts(2, nil)
// rnn.Train(patternsrnn, 100000, 0.3, 0.01, true)
// rnn.Test(patternsrnntest)
}
