From 6a5fd0bb1eee7a94e5a4629ca39442472bbdea3a Mon Sep 17 00:00:00 2001
From: WuZhuoran
Date: Fri, 5 Jul 2019 16:02:50 -0700
Subject: [PATCH 1/3] feat: add Elu activations

---
 neural_nets/README.md                  |  1 +
 neural_nets/activations/activations.py | 19 +++++++++++++++++++
 2 files changed, 20 insertions(+)

diff --git a/neural_nets/README.md b/neural_nets/README.md
index 07754bb..60356d2 100644
--- a/neural_nets/README.md
+++ b/neural_nets/README.md
@@ -10,6 +10,7 @@ emphasize conceptual understanding over flexibility.
     - Hyperbolic tangent (tanh)
     - Logistic sigmoid
     - Affine
+    - Exponential Linear Units (Elu) [Djork-Arné Clevert et al., 2015](http://arxiv.org/abs/1511.07289)
 
 2. **Losses**. Common loss functions. Includes:
     - Squared error
diff --git a/neural_nets/activations/activations.py b/neural_nets/activations/activations.py
index 229a75d..4e2241e 100644
--- a/neural_nets/activations/activations.py
+++ b/neural_nets/activations/activations.py
@@ -128,3 +128,22 @@ def grad(self, x):
 
     def grad2(self, x):
         return np.zeros_like(x)
+
+
+class Elu(ActivationBase):
+    def __init__(self, alpha=1.0):
+        self.alpha = alpha
+        super().__init__()
+
+    def __str__(self):
+        return "Elu(alpha={})".format(self.alpha)
+
+    def fn(self, z):
+        _z = z * (z > 0) + self.alpha * (np.exp(z) - 1.) * (z < 0)
+        return _z
+
+    def grad(self, x):
+        return 1 if x >= 0 else self.fn(x) + self.alpha
+
+    def grad2(self, x):
+        return np.zeros_like(x)

From 151b5492c6c7e79b8b0f00bdba9ce62c006f408f Mon Sep 17 00:00:00 2001
From: Wu Zhuoran
Date: Sat, 6 Jul 2019 15:41:30 -0700
Subject: [PATCH 2/3] Update neural_nets/activations/activations.py

Co-Authored-By: David Bourgin
---
 neural_nets/activations/activations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/neural_nets/activations/activations.py b/neural_nets/activations/activations.py
index 4e2241e..eb414cc 100644
--- a/neural_nets/activations/activations.py
+++ b/neural_nets/activations/activations.py
@@ -143,7 +143,7 @@ def fn(self, z):
         return _z
 
     def grad(self, x):
-        return 1 if x >= 0 else self.fn(x) + self.alpha
+        return np.where(x >= 0, np.ones_like(x), self.fn(x) + self.alpha)
 
     def grad2(self, x):
         return np.zeros_like(x)

From ecb8488ffe563d3fe402f9f058739782dcd5f935 Mon Sep 17 00:00:00 2001
From: Wu Zhuoran
Date: Sat, 6 Jul 2019 15:41:36 -0700
Subject: [PATCH 3/3] Update neural_nets/activations/activations.py

Co-Authored-By: David Bourgin
---
 neural_nets/activations/activations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/neural_nets/activations/activations.py b/neural_nets/activations/activations.py
index eb414cc..4c2f1f4 100644
--- a/neural_nets/activations/activations.py
+++ b/neural_nets/activations/activations.py
@@ -130,7 +130,7 @@ def grad2(self, x):
         return np.zeros_like(x)
 
 
-class Elu(ActivationBase):
+class ELU(ActivationBase):
     def __init__(self, alpha=1.0):
         self.alpha = alpha
         super().__init__()
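
As a sanity check on the series above, here is a minimal standalone sketch of the forward pass and first derivative that the final patch produces. The elu and elu_grad helper names are illustrative only and are not part of the repository's ActivationBase API; note also that np.where evaluates both branches, so np.exp may warn about overflow for very large positive inputs.

import numpy as np

def elu(z, alpha=1.0):
    # ELU(z) = z for z > 0, alpha * (exp(z) - 1) otherwise
    return np.where(z > 0, z, alpha * (np.exp(z) - 1.0))

def elu_grad(z, alpha=1.0):
    # dELU/dz = 1 for z >= 0, and ELU(z) + alpha (= alpha * exp(z)) for z < 0,
    # matching the np.where form introduced in PATCH 2/3
    return np.where(z >= 0, np.ones_like(z), elu(z, alpha) + alpha)

z = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
print(elu(z))       # negative inputs saturate smoothly toward -alpha
print(elu_grad(z))  # derivative is continuous at 0 when alpha == 1

The np.where rewrite in PATCH 2/3 matters because the original scalar expression (1 if x >= 0 else ...) raises a ValueError on NumPy arrays, where the truth value of a multi-element comparison is ambiguous.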