RNN
pchavanne committed Jan 28, 2017
1 parent d822e57 commit 3f34498
Showing 1 changed file with 10 additions and 13 deletions.
23 changes: 10 additions & 13 deletions yadll/layers.py
@@ -602,13 +602,13 @@ def get_unsupervised_cost(self, persistent=None, k=1, **kwargs):
 
 
 class BatchNormalization(Layer):
-    """
+    r"""
     Normalize the input layer over each mini-batch according to [1]_:
 
     .. math::
 
-       \\hat{x} = \\frac{x - E[x]}{\\sqrt(Var[x] + \\epsilon)}
-       y = \\gamma * \\hat{x} + \\beta
+       \hat{x} = \frac{x - E[x]}{\sqrt(Var[x] + \epsilon)}
+       y = \gamma * \hat{x} + \beta
 
     References
     ----------
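For reference, the normalization the updated docstring describes, as a minimal NumPy sketch. yadll itself is Theano-based, so this is an illustration only; `batch_norm`, `gamma`, `beta`, and `eps` are stand-in names, not the layer's API.

```python
import numpy as np

def batch_norm(x, gamma, beta, eps=1e-5):
    """Normalize x over the mini-batch axis, then scale and shift."""
    mean = x.mean(axis=0)                    # E[x], per feature
    var = x.var(axis=0)                      # Var[x], per feature
    x_hat = (x - mean) / np.sqrt(var + eps)  # normalized activations
    return gamma * x_hat + beta              # y = gamma * x_hat + beta

# Example: a batch of 4 samples with 3 features
x = np.random.randn(4, 3)
y = batch_norm(x, gamma=np.ones(3), beta=np.zeros(3))
```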
@@ -645,7 +645,7 @@ def get_output(self, stochastic=True, **kwargs):
 
 
 class RNN(Layer):
-    """
+    r"""
     Recurrent Neural Network
 
     .. math ::
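The RNN equations themselves are collapsed out of this diff. For orientation, a sketch of the standard Elman-style recurrence such a layer typically implements, h_t = tanh(x_t.W_x + h_{t-1}.W_h + b); the weight names here are assumptions, not yadll's parameters.

```python
import numpy as np

def rnn_step(x_t, h_prev, W_x, W_h, b):
    """One recurrent step: mix the current input with the previous state."""
    return np.tanh(x_t @ W_x + h_prev @ W_h + b)

# Unroll over a short random sequence
n_in, n_hid = 8, 16
W_x = np.random.randn(n_in, n_hid)
W_h = np.random.randn(n_hid, n_hid)
b = np.zeros(n_hid)
h = np.zeros(n_hid)
for x_t in np.random.randn(5, n_in):
    h = rnn_step(x_t, h, W_x, W_h, b)
```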
@@ -699,15 +699,12 @@ class LSTM(Layer):
     Long Short Term Memory
 
     .. math ::
-        i_t &= \sigma_i(x_t W_{xi} + h_{t-1} W_{hi}
-               + w_{ci} \odot c_{t-1} + b_i)\\
-        f_t &= \sigma_f(x_t W_{xf} + h_{t-1} W_{hf}
-               + w_{cf} \odot c_{t-1} + b_f)\\
-        c_t &= f_t \odot c_{t - 1}
-               + i_t \odot \sigma_c(x_t W_{xc} + h_{t-1} W_{hc} + b_c)\\
-        o_t &= \sigma_o(x_t W_{xo} + h_{t-1} W_{ho}
-               + w_{co} \odot c_t + b_o)\\
-        h_t &= o_t \odot \sigma_h(c_t)
+        i_t &= \sigma(x_t.W_{xi} + h_{t-1}.W_{hi} + b_i)\\
+        f_t &= \sigma(x_t.W_{xf} + h_{t-1}.W_{hf} + b_f)\\
+        \tilde{C_t} &= \tanh(x_t.W_{xc} + h_{t-1}.W_{hc} + b_c)\\
+        C_t &= f_t * C_{t-1} + i_t * \tilde{C_t}\\
+        o_t &= \sigma(x_t.W_{xo} + h_{t-1}.W_{ho} + b_o)\\
+        h_t &= o_t * \tanh(C_t)
 
     Parameters
     ----------
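The updated equations, traced step by step in a NumPy sketch. Again an illustration only, since yadll's layer is Theano-based; the `sigmoid` helper and the weight/bias naming are assumed.

```python
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def lstm_step(x_t, h_prev, C_prev, W, b):
    """One LSTM step; W[g] = (input weights, recurrent weights) per gate g."""
    i_t = sigmoid(x_t @ W['i'][0] + h_prev @ W['i'][1] + b['i'])      # input gate
    f_t = sigmoid(x_t @ W['f'][0] + h_prev @ W['f'][1] + b['f'])      # forget gate
    C_tilde = np.tanh(x_t @ W['c'][0] + h_prev @ W['c'][1] + b['c'])  # candidate cell
    C_t = f_t * C_prev + i_t * C_tilde                                # new cell state
    o_t = sigmoid(x_t @ W['o'][0] + h_prev @ W['o'][1] + b['o'])      # output gate
    h_t = o_t * np.tanh(C_t)                                          # new hidden state
    return h_t, C_t

# Example with random parameters
n_in, n_hid = 8, 16
W = {g: (np.random.randn(n_in, n_hid), np.random.randn(n_hid, n_hid)) for g in 'ifco'}
b = {g: np.zeros(n_hid) for g in 'ifco'}
h_t, C_t = lstm_step(np.random.randn(n_in), np.zeros(n_hid), np.zeros(n_hid), W, b)
```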
