Skip to content

Commit

Permalink
NIPS evaluation
Browse files Browse the repository at this point in the history
  • Loading branch information
feiwang3311 committed Oct 13, 2018
1 parent b69666c commit 35ca374
Show file tree
Hide file tree
Showing 22 changed files with 35,566 additions and 11,461 deletions.
8 changes: 1 addition & 7 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,10 +1,4 @@
*.bin
.*.sw*
target/
src/out/ICFP18evaluation/
src/out/NIPS18evaluation/save_fig/
src/out/NIPS18evaluation/evaluationRNN/
src/out/NIPS18evaluation/evaluationLSTM/
src/out/NIPS18evaluation/evaluationCNN/
src/out/NIPS18evaluation/evaluationTreeLSTM/Dynet/
src/out/NIPS18evaluation/evaluationTreeLSTM/Lantern/
src/out/
34 changes: 30 additions & 4 deletions src/main/scala/lantern/NNModule.scala
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ trait NNModule extends TensorDsl {
def apply(ins: ArrayBuffer[TensorR]): ArrayBuffer[TensorR] @diff
}

case class VanillaRNNCell(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val batchFirst: Boolean = false, val name: String = "vanilla_rnn_cell") extends RnnCell {
case class VanillaRNNCell(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val name: String = "vanilla_rnn_cell") extends RnnCell {
val inLinear = Linear1D2(inputSize, hiddenSize, hiddenSize)
val outLinear = Linear1D(hiddenSize, outputSize)
def apply(ins: ArrayBuffer[TensorR]): ArrayBuffer[TensorR] @diff = {
Expand All @@ -108,9 +108,9 @@ trait NNModule extends TensorDsl {
def init(batchSize: Int) = ArrayBuffer(TensorR(Tensor.zeros(batchSize, hiddenSize)))
}

case class VanillaRNNCellTrans(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val batchFirst: Boolean = false, val name: String = "vanilla_rnn_cell_trans") extends RnnCell {
case class VanillaRNNCellTrans(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val name: String = "vanilla_rnn_cell_trans") extends RnnCell {
val inLinear = Linear1D2Trans(inputSize, hiddenSize, hiddenSize)
val outLinear = Linear1D(hiddenSize, outputSize)
val outLinear = Linear1DTrans(hiddenSize, outputSize)
def apply(ins: ArrayBuffer[TensorR]): ArrayBuffer[TensorR] @diff = {
assert(ins.size == 2, "vanilla rnn cell trans should take a input of two tensors, the next element, and the last hidden layer")
val in = ins(0)
Expand All @@ -121,7 +121,7 @@ trait NNModule extends TensorDsl {
def init(batchSize: Int) = ArrayBuffer(TensorR(Tensor.zeros(batchSize, hiddenSize)))
}

case class LSTMCell(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val batchFirst: Boolean = false, val name: String = "lstm_cell") extends RnnCell {
case class LSTMCell(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val name: String = "lstm_cell") extends RnnCell {
val scale1: Float = 1.0f / sqrt(inputSize).toFloat
val scale2: Float = 1.0f / sqrt(hiddenSize).toFloat

Expand All @@ -147,6 +147,32 @@ trait NNModule extends TensorDsl {
def init(batchSize: Int) = ArrayBuffer(TensorR(Tensor.zeros(batchSize, hiddenSize)), TensorR(Tensor.zeros(batchSize, hiddenSize)))
}

// LSTM recurrent cell built from the "Trans" (transposed-weight) linear layers
// (Linear1D2Trans / Linear1DTrans) instead of the Linear1D2 / Linear1D used by LSTMCell.
// Implements the standard LSTM recurrence over (input, previous hidden, previous cell).
case class LSTMCellTrans(val inputSize: Int, val hiddenSize: Int, val outputSize: Int, val name: String = "lstm_cell_trans") extends RnnCell {
// 1/sqrt(fan-in) scaling factors.
// NOTE(review): scale1/scale2 are never referenced in this class body — the Trans
// linear layers may handle their own initialization; confirm whether these fields
// are dead code carried over from LSTMCell.
val scale1: Float = 1.0f / sqrt(inputSize).toFloat
val scale2: Float = 1.0f / sqrt(hiddenSize).toFloat

// initialize all parameters
// Each gate maps (input of inputSize, hidden of hiddenSize) -> hiddenSize.
val fGate = Linear1D2Trans(inputSize, hiddenSize, hiddenSize) // forget gate
val iGate = Linear1D2Trans(inputSize, hiddenSize, hiddenSize) // input gate
val cGate = Linear1D2Trans(inputSize, hiddenSize, hiddenSize) // candidate cell state
val oGate = Linear1D2Trans(inputSize, hiddenSize, hiddenSize) // output gate
val outLinear = Linear1DTrans(hiddenSize, outputSize) // projects hidden state to the output
// One LSTM step. `ins` = (current input, previous hidden, previous cell);
// returns (output, new hidden, new cell). `@diff` marks the method as
// differentiable under Lantern's staged reverse-mode AD.
def apply(ins: ArrayBuffer[TensorR]): ArrayBuffer[TensorR] @diff = {
assert(ins.size == 3, "LSTM cell should take a input of three tensors, the next element, the last hidden layer, and the last cell layer")
val in = ins(0)
val lastHidden = ins(1)
val lastCell = ins(2)
// Standard LSTM gate equations:
val f = fGate(in, lastHidden).sigmoid() // fraction of old cell state to keep
val i = iGate(in, lastHidden).sigmoid() // fraction of candidate to write
val o = oGate(in, lastHidden).sigmoid() // fraction of new cell state to expose
val C = cGate(in, lastHidden).tanh() // candidate cell state
val c = f * lastCell + i * C // new cell state
val h = o * c.tanh() // new hidden state
ArrayBuffer(outLinear(h), h, c)
}
// Fresh zero (hidden, cell) state pair for a sequence with the given batch size.
def init(batchSize: Int) = ArrayBuffer(TensorR(Tensor.zeros(batchSize, hiddenSize)), TensorR(Tensor.zeros(batchSize, hiddenSize)))
}

// case class DynamicRNN(val cell: RnnCell, val name: String = "dynamic_rnn_unroll") extends Module {
// @virtualize
// override def apply(input: TensorR, target: Rep[Array[Int]], lengths: Option[Rep[Array[Int]]] = None, batchFirst: Boolean = false): ArrayBuffer[TensorR] @diff = {
Expand Down
Loading

0 comments on commit 35ca374

Please sign in to comment.