
Commit

Changed linear combination implementation to one that operates on immutable list
martar committed Oct 29, 2012
1 parent 4e2b03a commit a892660
Showing 2 changed files with 24 additions and 24 deletions.
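Note: the commit swaps a mutable Array accumulator for a combination built from immutable Lists. For orientation, a minimal sketch of the two styles on a single dot product (illustrative only, not the committed code):

// Old style: indexed loop with in-place updates on Arrays.
def dotMutable(v: Array[Double], w: Array[Double]): Double = {
  var acc = 0.0
  for (i <- 0 until v.length) acc += v(i) * w(i)
  acc
}

// New style: the same dot product over immutable Lists, with no mutation.
def dotImmutable(v: List[Double], w: List[Double]): Double =
  v.zip(w).map { case (a, b) => a * b }.sum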
34 changes: 17 additions & 17 deletions src/main/scala/nnetworks/NNetwork.scala
@@ -1,6 +1,7 @@
package nnetworks

import scala.Array.canBuildFrom
import scala.util.Random

object Functions {
def sigmoid(v: Double): Double = 1.0 / (1.0 + math.exp(-v))
@@ -17,28 +18,28 @@ object Functions {
/**
* layer is a two dimensional array of doubles - weights
*/
-class Layer(activation: Double => Double, weights: Array[Array[Double]]) {
-
-  /**
-   * Multiply input vector with weight matrix
-   * Fastest implementation based on http://blog.scala4java.com/2011/12/matrix-multiplication-in-scala-single.html
-   */
-  def multiThreadedIdiomatic(m1: Array[Double], m2: Array[Array[Double]]) = {
-    val res = Array.fill[Double](m2(0).length)(0.0)
-    for (
-      col <- (0 until m2(0).length).par;
-      i <- 0 until m1.par.length
-    ) {
-      res(col) += m1(i) * m2(i)(col)
+class Layer(activation: Double => Double, weights: List[List[Double]]) {
+
+  def genRandom() = {
+    Seq.fill(weights.length)(Random.nextDouble)
+  }
+
+  def multiThreadedLinearCombination(input: List[Double], matrix: List[List[Double]]) = {
+
+    def helper(input: List[Double], matrix: List[List[Double]], accu: List[Double]): List[Double] = matrix match {
+      case Nil => accu.reverse
+      case head :: tail => helper(input, tail, (head.par.zip(input.par) map ((tuple: (Double, Double)) => tuple._1 * tuple._2) sum) :: accu)
     }
-    res.toList
+    helper(input, matrix, List())

}

/**
* Evaluate the layer: multiply input with weights and apply activation function to each neuron
*/
def eval(input: List[Double]): List[Double] = {
-    multiThreadedIdiomatic(input.toArray, weights) map (activation)
+    multiThreadedLinearCombination(input, weights) map (activation)
}
}
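For a sense of what the new List-based path computes, a hedged usage sketch of the Layer above (assumes Functions.sigmoid from this file; the weight values are illustrative, borrowed from the AND test below):

// One output neuron; each weight row is (bias weight, w1, w2), and the caller supplies the bias input of 1.
val andWeights: List[List[Double]] = List(List(-30.0, 20.0, 20.0))
val andLayer = new Layer(Functions.sigmoid, andWeights)

// Weighted sum: -30.0 + 20.0 + 20.0 = 10.0, and sigmoid(10.0) ≈ 0.99995
andLayer.eval(List(1.0, 1.0, 1.0)) // ≈ List(0.99995)

// Weighted sum: -30.0 + 0.0 + 0.0 = -30.0, and sigmoid(-30.0) ≈ 0.0
andLayer.eval(List(1.0, 0.0, 0.0)) // ≈ List(0.0)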

@@ -49,9 +50,8 @@ class Network(layers: List[Layer]) {
case head :: tail => eval0(tail, 1 :: head.eval(input)) //we add a bias neuron set to 1

}
//add a bias neuron set to 1
eval0(layers, 1 :: input)
}


}
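The Network wrapper threads each layer's output into the next with a bias unit prepended. The eval0 base case falls outside the hunk shown above, so here is a hedged re-expression of the same idea rather than the repository's exact code:

// Illustrative only: "prepend a bias unit of 1, evaluate the layer, repeat" written as a fold.
def feedForward(layers: List[Layer], input: List[Double]): List[Double] =
  layers.foldLeft(input)((acc, layer) => layer.eval(1.0 :: acc))

Each weight row's first entry therefore acts as the bias weight, which is why the rows in the tests below start with values such as -30.0 and -10.0.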
14 changes: 7 additions & 7 deletions src/test/scala/nnetworks/NetworkTestSuite.scala
@@ -27,7 +27,7 @@ class NetworkTestSuite extends FunSuite {
}

test("Test AND layer implemented with activation function") {
-    val weights: Array[Array[Double]] = Array(Array(-30.0), Array(20), Array(20))
+    val weights: List[List[Double]] = List(List(-30., 20, 20))
val layer = new Layer(Functions.steep(0.5), weights)

assert(layer.eval(List(1., 0., 0.)) === List(0))
@@ -38,7 +38,7 @@ class NetworkTestSuite extends FunSuite {
}

test("Test XOR first layer network implemented") {
-    val weights: Array[Array[Double]] = Array(Array(-30.0, 10.), Array(20, -20.), Array(20, -10.))
+    val weights: List[List[Double]] = List(List(-30., 20., 20.), List(10., -20., -10.))
val layer = new Layer(Functions.steep(0.5), weights)

assert(layer.eval(List(1., 0., 0.)) === List(0, 1))
@@ -49,7 +49,7 @@ class NetworkTestSuite extends FunSuite {
}

test("Test AND network implemented with activation function - Network! with explicit Bias") {
-    val weights: Array[Array[Double]] = Array(Array(-30.0), Array(20), Array(20))
+    val weights: List[List[Double]] = List(List(-30.0, 20, 20))
val layer = new Layer(Functions.steep(0.5), weights)
val net = new Network(List(layer))
assert(net.eval(List(0., 0.)) === List(0))
@@ -58,12 +58,12 @@ class NetworkTestSuite extends FunSuite {
assert(net.eval(List(1., 1.)) === List(1))
}

test("Test XOR network implemented with steep activation function - Network!") {
test("Test XOR with steep activation function") {
val activation = Functions.steep(0.5)
-    val weights: Array[Array[Double]] = Array(Array(-30.0, 10.), Array(20, -20.), Array(20, -10.))
+    val weights: List[List[Double]] = List(List(-30.0, 20, 20), List(10., -20., -10.))
val layer = new Layer(activation, weights)

-    val weights2: Array[Array[Double]] = Array(Array(-10.0), Array(20), Array(20))
+    val weights2: List[List[Double]] = List(List(-10.0, 20, 20))
val layer2 = new Layer(activation, weights2)

val net = new Network(List(layer, layer2))
@@ -72,5 +72,5 @@ class NetworkTestSuite extends FunSuite {
assert(net.eval(List(1., 0.)) === List(0))
assert(net.eval(List(1., 1.)) === List(1))
}

}
