Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,17 +1,79 @@
package com.thoughtworks.deeplearning
import java.io.{PrintStream, PrintWriter}

import com.thoughtworks.deeplearning.DeepLearning.Tape
import com.thoughtworks.continuation._
import com.thoughtworks.future._

import scalaz.syntax.all._
import com.thoughtworks.raii.asynchronous._
import simulacrum.typeclass

import scala.language.implicitConversions
import algebra.ring.MultiplicativeMonoid
import scalaz.Semigroup

object DeepLearning {

/** Merges two `Throwable`s into a single failure.
  *
  * Existing [[AbstractMultipleException]] instances are flattened rather than
  * nested, and duplicate throwables collapse through `Set` semantics. When
  * both operands are the same plain throwable, that throwable is returned
  * unchanged instead of being wrapped.
  */
implicit object multipleExceptionThrowableSemigroup extends Semigroup[Throwable] {
  override def append(f1: Throwable, f2: => Throwable): Throwable = {
    // Both the original branches always force the by-name argument exactly
    // once, so binding it eagerly here preserves evaluation behavior.
    val second = f2
    (f1, second) match {
      case (me1: AbstractMultipleException, me2: AbstractMultipleException) =>
        MultipleException(me1.throwableSet ++ me2.throwableSet)
      case (me1: AbstractMultipleException, e) =>
        MultipleException(me1.throwableSet + e)
      case (_, me2: AbstractMultipleException) =>
        MultipleException(me2.throwableSet + f1)
      case (_, e) if e == f1 =>
        f1
      case (_, e) =>
        MultipleException(Set(f1, e))
    }
  }
}

/** The concrete aggregate-exception type produced by
  * [[multipleExceptionThrowableSemigroup]] when two failures are merged.
  */
private final case class MultipleException(throwableSet: Set[Throwable])
    extends DeepLearning.AbstractMultipleException

/** A composite exception that aggregates several underlying failures into one.
  *
  * Subclasses supply [[throwableSet]]; every `printStackTrace` overload and
  * [[getStackTrace]] delegate to each member of that set so that no
  * individual failure is lost when failures are merged.
  */
abstract class AbstractMultipleException extends RuntimeException("Multiple exceptions found") {

  /** The underlying exceptions merged into this aggregate. */
  def throwableSet: Set[Throwable]

  /** Renders every member exception, one per line. */
  override def toString: String = throwableSet.mkString("\n")

  // Delegate to each member exception so each original trace is printed.
  override def printStackTrace(): Unit = {
    for (throwable <- throwableSet) {
      throwable.printStackTrace()
    }
  }

  override def printStackTrace(s: PrintStream): Unit = {
    for (throwable <- throwableSet) {
      throwable.printStackTrace(s)
    }
  }

  override def printStackTrace(s: PrintWriter): Unit = {
    for (throwable <- throwableSet) {
      throwable.printStackTrace(s)
    }
  }

  // Lazily populates this exception's stack trace from the members' traces on
  // first access; `synchronized` guards the check-then-set against races.
  // NOTE(review): this relies on `super.getStackTrace` returning `null` before
  // `setStackTrace` has been called — confirm against the target JVM's
  // `Throwable` implementation, which may return an empty array instead.
  override def getStackTrace: Array[StackTraceElement] = synchronized {
    super.getStackTrace match {
      case null =>
        setStackTrace(throwableSet.flatMap(_.getStackTrace)(collection.breakOut))
        super.getStackTrace
      case stackTrace =>
        stackTrace
    }
  }

  // Skip the expensive stack capture for the aggregate itself; the useful
  // traces are those of the member exceptions (see getStackTrace above).
  override def fillInStackTrace(): this.type = {
    this
  }

}

/** The node of the Wengert list created during the
  * [[DeepLearning.forward forward]] pass.
  *
  * @param data the value produced by the forward pass
  * @param backward callback invoked with the delta to perform the backward pass
  * @tparam Data the type of the forward value (covariant)
  * @tparam Delta the type of the delta consumed by the backward pass (contravariant)
  */
final case class Tape[+Data, -Delta](data: Data, backward: Do[Delta] => UnitContinuation[Unit])

Expand Down
9 changes: 8 additions & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,10 @@ lazy val `plugins-Logging` = project.dependsOn(`plugins-Layers`, `plugins-Weight

lazy val `plugins-Operators` = project

lazy val `plugins-HLists` = project.dependsOn(DeepLearning)

lazy val `plugins-Products` = project.dependsOn(`plugins-HLists`)

lazy val `plugins-FloatTraining` = project.dependsOn(`plugins-Training`)

lazy val `plugins-FloatLiterals` = project.dependsOn(`DeepLearning`)
Expand All @@ -38,7 +42,8 @@ lazy val `plugins-CumulativeFloatLayers` =
`plugins-FloatTraining` % Test,
`plugins-FloatLiterals` % Test,
`plugins-FloatWeights` % Test,
`plugins-ImplicitsSingleton` % Test
`plugins-ImplicitsSingleton` % Test,
`plugins-Products` % Test
)

lazy val `plugins-Training` = project.dependsOn(DeepLearning)
Expand Down Expand Up @@ -114,6 +119,8 @@ lazy val `plugins-CumulativeDoubleLayers` =

lazy val `plugins-Builtins` =
project.dependsOn(
`plugins-Products`,
`plugins-HLists`,
`plugins-ImplicitsSingleton`,
`plugins-Layers`,
`plugins-Weights`,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,9 @@ trait Builtins
with INDArrayLiterals
with INDArrayWeights
with INDArrayLayers
with CumulativeINDArrayLayers {
with CumulativeINDArrayLayers
with HLists
with Products {

trait ImplicitsApi
extends super[Layers].ImplicitsApi
Expand All @@ -118,6 +120,8 @@ trait Builtins
with super[INDArrayTraining].ImplicitsApi
with super[INDArrayLiterals].ImplicitsApi
with super[INDArrayLayers].ImplicitsApi
with super[HLists].ImplicitsApi
with super[Products].ImplicitsApi

type Implicits <: ImplicitsApi

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@ trait Builtins
with DoubleLiterals
with DoubleWeights
with DoubleLayers
with CumulativeDoubleLayers {
with CumulativeDoubleLayers
with HLists
with Products {

trait ImplicitsApi
extends super[Layers].ImplicitsApi
Expand All @@ -32,6 +34,8 @@ trait Builtins
with super[DoubleTraining].ImplicitsApi
with super[DoubleLiterals].ImplicitsApi
with super[DoubleLayers].ImplicitsApi
with super[HLists].ImplicitsApi
with super[Products].ImplicitsApi

type Implicits <: ImplicitsApi

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -379,31 +379,58 @@ final class CumulativeFloatLayersSpec

}

"EagerExecution" in {
"eager execution" - {
"single expression" in {
val hyperparameters =
Factory[FloatTraining with Operators with FloatLiterals with CumulativeFloatLayers with ImplicitsSingleton with FixedLearningRate]
.newInstance(fixedLearningRate = 1.0f)

val hyperparameters =
Factory[FloatTraining with Operators with FloatLiterals with CumulativeFloatLayers with ImplicitsSingleton with FixedLearningRate]
.newInstance(fixedLearningRate = 1.0f)
import hyperparameters.implicits._

import hyperparameters.implicits._
val weight = hyperparameters.FloatWeight(1.0f)

val weight = hyperparameters.FloatWeight(1.0f)
def myNetwork(input: Float): hyperparameters.FloatLayer = {
6.7f + !(input + weight) + weight + 5.5f
}

def myNetwork(input: Float): hyperparameters.FloatLayer = {
6.7f + !(input + weight) + weight + 5.5f
}
def train(inputData: Float): Future[Float] = {
myNetwork(inputData).train
}

def train(inputData: Float): Future[Float] = {
myNetwork(inputData).train
for {
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
} yield weight.data should be(-4)
}

for {
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
} yield weight.data should be(-4)
"multiple expression" in {
val hyperparameters =
Factory[Products with FloatTraining with Operators with FloatLiterals with CumulativeFloatLayers with ImplicitsSingleton with FixedLearningRate]
.newInstance(fixedLearningRate = 1.0f)

import hyperparameters.implicits._

val weight = hyperparameters.FloatWeight(1.0f)

def myNetwork(input: Float): hyperparameters.FloatLayer = {
val (a, b, c) = !(input + weight, 2.0f, weight)
6.7f + a + b + c + weight + 5.5f
}

def train(inputData: Float): Future[Float] = {
myNetwork(inputData).train
}

for {
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
_ <- train(1.0f)
} yield weight.data should be(-4)
}
}
}
2 changes: 1 addition & 1 deletion plugins-DoubleWeights/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@ libraryDependencies += "com.thoughtworks.feature" %% "implicitapply" % "2.3.0-M8

libraryDependencies += "com.thoughtworks.feature" %% "factory" % "2.3.0-M8"

libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.2"
libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.3"
2 changes: 1 addition & 1 deletion plugins-FloatWeights/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@ libraryDependencies += "com.thoughtworks.feature" %% "implicitapply" % "2.3.0-M8

libraryDependencies += "com.thoughtworks.feature" %% "factory" % "2.3.0-M8"

libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.2"
libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.3"
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
package com.thoughtworks.deeplearning.plugins

import com.thoughtworks.continuation._
import com.thoughtworks.deeplearning.DeepLearning
import com.thoughtworks.deeplearning.DeepLearning.Tape
import com.thoughtworks.raii.asynchronous._
import scalaz.Applicative
import scalaz.syntax.all._
import scalaz.Tags.Parallel
import shapeless.{::, HList, HNil}

import java.io.{PrintStream, PrintWriter}

import scalaz.Semigroup

/** Internal helpers shared by every [[HLists]] mixin instance. */
private object HLists {

  /** Parallel applicative for [[Do]], merging concurrent failures through
    * [[DeepLearning.multipleExceptionThrowableSemigroup]].
    *
    * The type is annotated explicitly: implicit definitions should never rely
    * on inference (it is an error in Scala 3 and fragile under -Xlint). The
    * annotation is known-correct because `Applicative[ParallelDo]` is summoned
    * from this implicit in `hconsDeepLearning`.
    */
  implicit val doParallelApplicative: Applicative[ParallelDo] =
    asynchronousDoParallelApplicative(DeepLearning.multipleExceptionThrowableSemigroup)

  /** Shared no-op backward pass for `HNil`, allocated once instead of per tape. */
  private val noop: Do[HNil] => UnitContinuation[Unit] = {
    Function.const(UnitContinuation.now(()))
  }

}

/** A plugin that makes shapeless [[shapeless.HList HList]]s differentiable by
  * deriving [[DeepLearning]] type-class instances inductively: a base instance
  * for `HNil` and an inductive instance for `Head :: Tail`.
  *
  * @author 杨博 (Yang Bo)
  */
trait HLists {
  import com.thoughtworks.deeplearning.plugins.HLists._

  trait ImplicitsApi {
    /** Base case: `HNil` is differentiable with `HNil` data and delta and a
      * no-op backward pass.
      */
    implicit def hnilDeepLearning[L <: HNil]: DeepLearning.Aux[L, HNil, HNil] = new DeepLearning[L] {
      type Data = HNil
      type Delta = HNil

      def forward(differentiable: L): Do[Tape[Data, Delta]] = {
        Do.now(Tape(HNil, noop))
      }
    }

    /** Inductive case: `Head :: Tail` is differentiable whenever `Head` and
      * `Tail` are. The head's and tail's forward passes (and later their
      * backward passes) run in parallel; concurrent failures are merged by the
      * `Throwable` semigroup behind `doParallelApplicative`.
      */
    implicit def hconsDeepLearning[Head, Tail <: HList, HeadData, TailData <: HList, HeadDelta, TailDelta <: HList](
        implicit headDeepLearning: DeepLearning.Aux[Head, HeadData, HeadDelta],
        tailDeepLearning: DeepLearning.Aux[Tail, TailData, TailDelta])
      : DeepLearning.Aux[Head :: Tail, HeadData :: TailData, HeadDelta :: TailDelta] = new DeepLearning[Head :: Tail] {
      type Data = HeadData :: TailData
      type Delta = HeadDelta :: TailDelta

      def forward(differentiable: Head :: Tail): Do[Tape[Data, Delta]] = {
        val head :: tail = differentiable
        // Wrap both forward passes in `Parallel` so `tuple2` evaluates them
        // concurrently instead of sequentially.
        val doHead: ParallelDo[Tape[HeadData, HeadDelta]] = Parallel(headDeepLearning.forward(head))

        val doTail: ParallelDo[Tape[TailData, TailDelta]] = Parallel(tailDeepLearning.forward(tail))

        Parallel.unwrap(Applicative[ParallelDo].tuple2(doHead, doTail)).map {
          case (Tape(headData, headBackward), Tape(tailData, tailBackward)) =>
            // Split the incoming delta into head/tail parts and run both
            // backward passes in parallel, discarding their unit results.
            def backward(doDelta: Do[HeadDelta :: TailDelta]) = {
              val continuationHead: ParallelContinuation[Unit] = Parallel(headBackward(doDelta.map(_.head)))
              val continuationTail: ParallelContinuation[Unit] = Parallel(tailBackward(doDelta.map(_.tail)))
              Parallel.unwrap(continuationParallelApplicative.apply2(continuationHead, continuationTail) {
                (_: Unit, _: Unit) =>
                  ()
              })
            }
            Tape(headData :: tailData, backward)
        }

      }

    }
  }

  type Implicits <: ImplicitsApi

}
Original file line number Diff line number Diff line change
Expand Up @@ -23,43 +23,7 @@ import com.thoughtworks.dsl.Dsl

object INDArrayLayers {

final case class MultipleException(throwableSet: Set[Throwable])
extends RuntimeException("Multiple exceptions found") {
override def toString: String = throwableSet.mkString("\n")

override def printStackTrace(): Unit = {
for (throwable <- throwableSet) {
throwable.printStackTrace()
}
}

override def printStackTrace(s: PrintStream): Unit = {
for (throwable <- throwableSet) {
throwable.printStackTrace(s)
}
}

override def printStackTrace(s: PrintWriter): Unit = {
for (throwable <- throwableSet) {
throwable.printStackTrace(s)
}
}

override def getStackTrace: Array[StackTraceElement] = synchronized {
super.getStackTrace match {
case null =>
setStackTrace(throwableSet.flatMap(_.getStackTrace)(collection.breakOut))
super.getStackTrace
case stackTrace =>
stackTrace
}
}

override def fillInStackTrace(): this.type = {
this
}

}
// Aggregated-failure type for INDArray layers; the merging and stack-trace
// behavior comes from the shared [[DeepLearning.AbstractMultipleException]].
final case class MultipleException(throwableSet: Set[Throwable]) extends DeepLearning.AbstractMultipleException

// Workaround for https://github.com/deeplearning4j/nd4j/issues/1869
private[plugins] implicit final class Nd4jIssues1869Workaround(indArray: INDArray) {
Expand Down Expand Up @@ -134,23 +98,8 @@ trait INDArrayLayers extends DoubleLayers with DoubleLiterals with ImplicitsSing
}

@transient
private lazy val doParallelApplicative =
com.thoughtworks.raii.asynchronous.asynchronousDoParallelApplicative(new Semigroup[Throwable] {
override def append(f1: Throwable, f2: => Throwable): Throwable =
f1 match {
case MultipleException(exceptionSet1) =>
f2 match {
case MultipleException(exceptionSet2) => MultipleException(exceptionSet1 ++ exceptionSet2)
case e: Throwable => MultipleException(exceptionSet1 + e)
}
case _: Throwable =>
f2 match {
case MultipleException(exceptionSet2) => MultipleException(exceptionSet2 + f1)
case `f1` => f1
case e: Throwable => MultipleException(Set(f1, e))
}
}
})
implicit private lazy val doParallelApplicative =
asynchronousDoParallelApplicative(DeepLearning.multipleExceptionThrowableSemigroup)

private def parallelApply2[A, B, C](doA: Do[A], doB: Do[B])(f: (A, B) => C): Do[C] = {
Parallel.unwrap(doParallelApplicative.apply2(Parallel(doA), Parallel(doB))(f))
Expand Down
2 changes: 1 addition & 1 deletion plugins-INDArrayWeights/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ libraryDependencies += "com.thoughtworks.feature" %% "implicitapply" % "2.3.0-M8

libraryDependencies += "com.thoughtworks.feature" %% "factory" % "2.3.0-M8"

libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.2"
libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.3"

libraryDependencies ++= {
import Ordering.Implicits._
Expand Down
2 changes: 1 addition & 1 deletion plugins-Operators/build.sbt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.2"
libraryDependencies += "com.chuusai" %% "shapeless" % "2.3.3"
Loading