RNN
pchavanne committed Jan 31, 2017
1 parent 981804b commit 27c87e2
Showing 2 changed files with 18 additions and 19 deletions.
27 changes: 13 additions & 14 deletions yadll/activations.py
@@ -7,7 +7,7 @@


 def get_activation(activator):
-    """
+    r"""
     Call an activation function from an activator object
     Parameters
@@ -27,8 +27,8 @@ def get_activation(activator):


 def linear(x):
-    """Linear activation function
-    :math:`\\varphi(x) = x`
+    r"""Linear activation function
+    :math:`\varphi{x} = x`
     Parameters
     ----------
@@ -45,8 +45,8 @@ def linear(x):


 def sigmoid(x):
-    """Sigmoid function
-    :math:`\\varphi(x) = \\frac{1}{1 + e^{-x}}`
+    r"""Sigmoid function
+    :math:`\varphi(x) = \frac{1}{1 + e^{-x}}`
     Parameters
     ----------
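The sigmoid formula in the docstring can be checked with a short NumPy sketch (NumPy is used here purely for illustration; yadll itself builds these activations as Theano expressions):

import numpy as np

def sigmoid_ref(x):
    # \varphi(x) = 1 / (1 + e^(-x)), applied elementwise
    return 1.0 / (1.0 + np.exp(-x))

print(sigmoid_ref(np.array([-2.0, 0.0, 2.0])))  # ~[0.119, 0.5, 0.881]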
@@ -63,8 +63,8 @@ def sigmoid(x):


 def ultra_fast_sigmoid(x):
-    """Ultra fast Sigmoid function return an approximated standard sigmoid
-    :math:`\\varphi(x) = \\frac{1}{1 + e^{-x}}`
+    r"""Ultra fast Sigmoid function return an approximated standard sigmoid
+    :math:`\varphi(x) = \frac{1}{1 + e^{-x}}`
     Parameters
     ----------
@@ -87,7 +87,7 @@ def ultra_fast_sigmoid(x):

 def tanh(x):
     """Tanh activation function
-    :math:`\\varphi(x) = \\tanh(x)`
+    :math:`\varphi(x) = \tanh(x)`
     Parameters
     ----------
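A similar NumPy check for the tanh formula (illustration only): tanh is a rescaled sigmoid, which is why the two usually appear side by side.

import numpy as np

x = np.array([-1.0, 0.0, 1.0])
# \varphi(x) = tanh(x); equivalently 2*sigmoid(2x) - 1
print(np.allclose(np.tanh(x), 2.0 / (1.0 + np.exp(-2.0 * x)) - 1.0))  # True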
@@ -104,9 +104,8 @@ def tanh(x):


 def softmax(x):
-    """Softmax activation function
-    :math:`\\varphi(\\mathbf{x})_j =
-    \\frac{e^{\mathbf{x}_j}}{\sum_{k=1}^K e^{\mathbf{x}_k}}`
+    r"""Softmax activation function
+    :math:`\varphi(x)_j = \frac{\exp{x_j}}{\sum_{k=1}^K \exp{x_k}}`
     where :math:`K` is the total number of neurons in the layer. This
     activation function gets applied row-wise.
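The row-wise behaviour mentioned in the docstring can be sketched in NumPy as follows (illustration only, with the usual max-subtraction for numerical stability; yadll's actual softmax is a Theano op):

import numpy as np

def softmax_rows(x):
    # \varphi(x)_j = exp(x_j) / sum_k exp(x_k), computed independently for each row
    z = x - x.max(axis=-1, keepdims=True)   # stabilise before exponentiating
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

batch = np.array([[1.0, 2.0, 3.0], [0.0, 0.0, 0.0]])
print(softmax_rows(batch).sum(axis=-1))  # each row sums to 1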
@@ -125,7 +124,7 @@ def softmax(x):


 def softplus(x):
-    """Softplus activation function :math:`\\varphi(x) = \\log(1 + e^x)`
+    r"""Softplus activation function :math:`\varphi{x} = \log{1 + \exp{x}}`
     Parameters
     ----------
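A quick numerical check of the softplus formula (NumPy, illustration only); softplus is a smooth approximation of relu and its derivative is the sigmoid.

import numpy as np

def softplus_ref(x):
    # \varphi(x) = log(1 + e^x)
    return np.log1p(np.exp(x))

print(softplus_ref(np.array([-3.0, 0.0, 3.0])))  # ~[0.049, 0.693, 3.049]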
@@ -142,8 +141,8 @@ def softplus(x):


 def relu(x, alpha=0):
-    """Rectified linear unit activation function
-    :math:`\\varphi(x) = \\max(alpha * x, x)`
+    r"""Rectified linear unit activation function
+    :math:`\varphi{x} = \max{x, \alpha * x}`
     Parameters
     ----------
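The relu formula max(x, alpha * x) covers both the plain and the leaky variant; a NumPy sketch for illustration (alpha=0 gives standard relu, 0 < alpha < 1 a leaky relu):

import numpy as np

def relu_ref(x, alpha=0.0):
    # \varphi(x) = max(x, alpha * x); identity for x >= 0, alpha-scaled for x < 0
    return np.maximum(x, alpha * x)

x = np.array([-2.0, -0.5, 0.0, 1.5])
print(relu_ref(x))             # [ 0.    0.    0.    1.5 ]
print(relu_ref(x, alpha=0.1))  # [-0.2  -0.05  0.    1.5 ]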
10 changes: 5 additions & 5 deletions yadll/objectives.py
@@ -41,7 +41,7 @@ def mean_absolute_error(prediction, target):
 def binary_hinge_error(prediction, target):
     r"""
     Binary Hinge Error: BHE
-    .. math:: hinge_i = \frac{1}{n} \sum_{j}{\max(1. - target_{i,j} * prediction_{i,j}, 0.)}
+    .. math:: BHE_i = \frac{1}{n} \sum_{j}{\max(1. - target_{i,j} * prediction_{i,j}, 0.)}
     """
     return T.mean(T.maximum(1. - target * prediction, 0.), axis=-1)
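The hinge expression in the return statement can be reproduced with NumPy for a small worked example (targets in {-1, +1}; illustration only, not part of the diff):

import numpy as np

prediction = np.array([[0.8, -0.6], [0.2, 0.9]])
target = np.array([[1.0, -1.0], [1.0, 1.0]])

# mean over the last axis of max(1 - target * prediction, 0)
bhe = np.mean(np.maximum(1.0 - target * prediction, 0.0), axis=-1)
print(bhe)  # [0.3  0.45]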
@@ -50,7 +50,7 @@ def binary_hinge_error(prediction, target):
 def categorical_hinge_error(prediction, target):
     r"""
     Categorical Hinge Error: CHE
-    .. math:: hinge_i = \frac{1}{n} \sum_{j}{\max(1. - target_{i,j} * prediction_{i,j}, 0.)}
+    .. math:: CHE_i = \frac{1}{n} \sum_{j}{\max(1. - target_{i,j} * prediction_{i,j}, 0.)}
     """
     return T.mean(T.maximum(1. - target * prediction, 0.), axis=-1)
@@ -60,8 +60,8 @@ def binary_crossentropy_error(prediction, target):
     r"""
     Binary Cross-entropy Error: BCE
-    .. math:: BCE_i = \frac{1}{n} \sum_{j}{-(target_{i,j} * \log(prediction_{i,j})
-                      + (1 - target_{i,j}) * \log(1 - prediction_{i,j}))}
+    .. math:: BCE_i = - \frac{1}{n} \sum_{j}{target_{i,j} * \log(prediction_{i,j}
+                      - (1 - target_{i,j}) * \log(1 - prediction_{i,j}))}
     """
     clip_pred = T.clip(prediction, EPSILON, 1 - EPSILON)
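A NumPy restatement of the BCE formula, including the EPSILON clipping visible in the diff (the EPSILON value below is an assumed small constant for illustration; the real constant is defined in yadll):

import numpy as np

EPSILON = 1e-7  # assumed value, illustration only

def bce_ref(prediction, target):
    p = np.clip(prediction, EPSILON, 1.0 - EPSILON)
    # BCE_i = -(1/n) * sum_j [ t_ij*log(p_ij) + (1 - t_ij)*log(1 - p_ij) ]
    return -np.mean(target * np.log(p) + (1.0 - target) * np.log(1.0 - p), axis=-1)

print(bce_ref(np.array([[0.9, 0.1]]), np.array([[1.0, 0.0]])))  # ~[0.105]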
@@ -72,7 +72,7 @@ def categorical_crossentropy_error(prediction, target):
     r"""
     Categorical Cross-entropy Error: CCE
-    .. math:: MAE_i = \frac{1}{n} \sum_{j}{\big|{target_{i,j} - prediction_{i,j}\big|}
+    .. math:: CCE_i = - \frac{1}{n} \sum_{j}{target_{i,j} * \log(prediction_{i,j})}
     """
     prediction /= prediction.sum(axis=-1, keepdims=True)
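And a NumPy sketch of categorical cross-entropy, following the corrected docstring formula literally and the row normalisation shown in the diff (illustration only):

import numpy as np

def cce_ref(prediction, target):
    p = prediction / prediction.sum(axis=-1, keepdims=True)  # normalise rows, as in the diff
    # CCE_i = -(1/n) * sum_j t_ij * log(p_ij)
    return -np.mean(target * np.log(p), axis=-1)

onehot = np.array([[0.0, 1.0, 0.0]])
probs = np.array([[0.2, 0.5, 0.3]])
print(cce_ref(probs, onehot))  # ~[0.231]  (= -log(0.5) / 3)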
