INDArrayLayers.scala

package com.thoughtworks.deeplearning
package plugins
import com.thoughtworks.deeplearning.DeepLearning.Tape
import com.thoughtworks.raii.asynchronous.Do
import com.thoughtworks.raii.asynchronous.Do._
import com.thoughtworks.raii.shared._
import com.thoughtworks.raii.covariant.{Releasable, ResourceT}
import org.nd4j.linalg.api.ndarray.INDArray
import org.nd4j.linalg.factory.Nd4j
import org.nd4s.Implicits._
import scalaz.concurrent.Future
import scala.util.{Success, Try}
import scalaz.{-\/, \/-}
import scalaz.syntax.all._
private object INDArrayLayers {

  /** A placeholder standing for "no delta accumulated yet", distinguishing an
    * untouched tape from one whose delta must be flushed backward on release.
    */
  private val Zero = Nd4j.zeros(1, 1)
}
/** A plugin that makes the `forward` pass of an [[INDArrayLayer]] shareable
  * and its backward deltas cumulative: all references to a layer within one
  * evaluation reuse a single forward computation, and every delta they send
  * backward is summed (with automatic broadcasting) before being flushed
  * upstream when the shared tape is released.
  *
  * @author 杨博 (Yang Bo) <pop.atry@gmail.com>
  */
trait INDArrayLayers extends RawINDArrayLayers {
  import INDArrayLayers._

  trait INDArrayLayerApi extends super[RawINDArrayLayers].INDArrayLayerApi {

    private lazy val forward0: Do[Tape[INDArray, INDArray]] = {
      val Do(future) = super.forward.flatMap {
        case Tape(data, flushBackward) =>
          Do(Future.delay(new Releasable[Future, Try[Tape[INDArray, INDArray]]] {

            // The sum of all deltas received so far. `Zero` means no delta has
            // arrived yet; `null` means this tape has already been released.
            @volatile
            private var currentDelta: INDArray = INDArrayLayers.Zero

            override def value: Try[Tape[INDArray, INDArray]] = {
              def cumulativeBackward(doDelta: Do[INDArray]): Future[Unit] = {
                Do.run(doDelta)
                  .map { delta =>
                    synchronized {
                      currentDelta = currentDelta match {
                        case null =>
                          throw new IllegalStateException("Cannot perform Tape.backward after the Tape is released")
                        case Zero =>
                          delta
                        case nonZeroDelta =>
                          // Combine the new delta with the accumulated one. A
                          // dimension of size 1 broadcasts to the other
                          // operand's size; any other mismatch is unexpected
                          // here and fails with a MatchError.
                          def autoBroadcastShape(shape1: Array[Int], shape2: Array[Int]): Array[Int] = {
                            require(shape1.length == shape2.length)
                            shape1.zip(shape2).map {
                              case (1, bSize)                       => bSize
                              case (aSize, 1)                       => aSize
                              case (aSize, bSize) if aSize == bSize => aSize
                            }
                          }
                          val shape = autoBroadcastShape(nonZeroDelta.shape(), delta.shape())
                          nonZeroDelta.broadcast(shape: _*) + delta.broadcast(shape: _*)
                      }
                    }
                  }
                  .get
                  .map {
                    case \/-(()) => // Success. Do nothing.
                    case -\/(e)  => handleException(e)
                  }
              }
              Success(Tape(data, cumulativeBackward))
            }

            // Flush the accumulated delta upstream exactly once, when the last
            // reference to this shared tape is released.
            override def release(): Future[Unit] = {
              synchronized {
                val deltaOption = currentDelta
                currentDelta = null
                deltaOption
              } match {
                case Zero =>
                  Future.now(())
                case nonZeroDelta =>
                  flushBackward(Do.now(nonZeroDelta))
              }
            }
          }))
      }

      // Share the underlying resource so that every reference to this layer
      // within one evaluation reuses the same forward pass and the same
      // accumulating tape.
      val ResourceT(sharedFuture) = ResourceT(future).shared
      Do(sharedFuture)
    }

    abstract override def forward: Do[DeepLearning.Tape[INDArray, INDArray]] = forward0
  }

  override type INDArrayLayer <: INDArrayLayerApi with Layer
}
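
// Usage sketch (an illustrative addition, not part of the published plugin):
// a minimal demonstration of the delta-accumulation rule implemented by
// `cumulativeBackward` above, using the Nd4j and nd4s imports already in
// scope in this file. Per dimension, a size of 1 broadcasts to the other
// operand's size; all other sizes must match. `AutoBroadcastDemo` is a
// hypothetical name used only for this example.
object AutoBroadcastDemo extends App {
  // Two deltas flowing into the same tape from different usages of a layer.
  val accumulated = Nd4j.ones(2, 1) // shape [2, 1]
  val incoming    = Nd4j.ones(1, 3) // shape [1, 3]

  // autoBroadcastShape(Array(2, 1), Array(1, 3)) would yield Array(2, 3).
  val shape = Array(2, 3)

  // Broadcast both deltas to the common shape and sum them, exactly as the
  // tape does when a second delta arrives before the tape is released.
  val sum = accumulated.broadcast(shape: _*) + incoming.broadcast(shape: _*)
  println(sum) // a 2x3 matrix whose entries are all 2.0
}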