Added support for Swish activation function. #63

Open · wants to merge 2 commits into base: master
16 changes: 16 additions & 0 deletions mlfromscratch/deep_learning/activation_functions.py
@@ -73,3 +73,19 @@ def __call__(self, x):
     def gradient(self, x):
         return 1 / (1 + np.exp(-x))
+
+class Swish():
+    # Reference : https://arxiv.org/abs/1710.05941v1
+    def __init__(self):
+        self.beta = 1
+
+    def __call__(self, x):
+        return x * self.__sigmoid(self.beta * x)
+
+    def __sigmoid(self, x):
+        return 1 / (1 + np.exp(-x))
+
+    def gradient(self, x):
+        betaX = self.beta * x
+        sig = self.__sigmoid(betaX)
+        return betaX * (sig * (1 - sig)) + sig
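
As a review aid, here is a quick sanity check of the new class (a sketch, assuming only numpy and the import path this PR uses in layers.py). Swish is f(x) = x * sigmoid(beta * x), so its derivative is sigmoid(beta * x) + beta * x * sigmoid(beta * x) * (1 - sigmoid(beta * x)), which is exactly what gradient() computes; a central finite difference confirms it:

import numpy as np
from mlfromscratch.deep_learning.activation_functions import Swish

swish = Swish()
x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])

# Forward pass: x * sigmoid(beta * x) with beta = 1
print(swish(x))  # approx. [-0.2384, -0.1888, 0.0, 0.3112, 1.7616]

# Compare the analytic gradient against a central finite difference
eps = 1e-6
numeric = (swish(x + eps) - swish(x - eps)) / (2 * eps)
assert np.allclose(swish.gradient(x), numeric, atol=1e-6)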

5 changes: 3 additions & 2 deletions mlfromscratch/deep_learning/layers.py
@@ -3,7 +3,7 @@
 import math
 import numpy as np
 import copy
-from mlfromscratch.deep_learning.activation_functions import Sigmoid, ReLU, SoftPlus, LeakyReLU
+from mlfromscratch.deep_learning.activation_functions import Sigmoid, ReLU, SoftPlus, LeakyReLU, Swish
 from mlfromscratch.deep_learning.activation_functions import TanH, ELU, SELU, Softmax


@@ -610,7 +610,8 @@ def output_shape(self):
     'softmax': Softmax,
     'leaky_relu': LeakyReLU,
     'tanh': TanH,
-    'softplus': SoftPlus
+    'softplus': SoftPlus,
+    'swish': Swish
 }

 class Activation(Layer):
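
With the registry entry in place, the new activation can be selected by name through the Activation layer. A minimal sketch of that usage, modeled on how the rest of this repo wires up models (NeuralNetwork, Dense, and Adam here follow the repo's existing patterns and are not part of this PR):

from mlfromscratch.deep_learning import NeuralNetwork
from mlfromscratch.deep_learning.layers import Dense, Activation
from mlfromscratch.deep_learning.optimizers import Adam
from mlfromscratch.deep_learning.loss_functions import CrossEntropy

# Activation('swish') resolves Swish through the dict above
model = NeuralNetwork(optimizer=Adam(), loss=CrossEntropy)
model.add(Dense(64, input_shape=(64,)))
model.add(Activation('swish'))
model.add(Dense(10))
model.add(Activation('softmax'))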
4 changes: 2 additions & 2 deletions mlfromscratch/supervised_learning/perceptron.py
@@ -4,7 +4,7 @@

 # Import helper functions
 from mlfromscratch.utils import train_test_split, to_categorical, normalize, accuracy_score
-from mlfromscratch.deep_learning.activation_functions import Sigmoid, ReLU, SoftPlus, LeakyReLU, TanH, ELU
+from mlfromscratch.deep_learning.activation_functions import Sigmoid, ReLU, SoftPlus, Swish, LeakyReLU, TanH, ELU
 from mlfromscratch.deep_learning.loss_functions import CrossEntropy, SquareLoss
 from mlfromscratch.utils import Plot
 from mlfromscratch.utils.misc import bar_widgets
@@ -19,7 +19,7 @@ class Perceptron():
         The number of training iterations the algorithm will tune the weights for.
     activation_function: class
         The activation that shall be used for each neuron.
-        Possible choices: Sigmoid, ExpLU, ReLU, LeakyReLU, SoftPlus, TanH
+        Possible choices: Sigmoid, ExpLU, ReLU, LeakyReLU, SoftPlus, TanH, Swish
     loss: class
         The loss function used to assess the model's performance.
         Possible choices: SquareLoss, CrossEntropy
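
With this change, Swish can be passed to the Perceptron like any other activation class. A usage sketch modeled on the repo's existing perceptron example (the dataset, split parameters, and constructor values are illustrative, not part of this PR):

import numpy as np
from sklearn import datasets

from mlfromscratch.utils import train_test_split, to_categorical, normalize, accuracy_score
from mlfromscratch.deep_learning.activation_functions import Swish
from mlfromscratch.deep_learning.loss_functions import CrossEntropy
from mlfromscratch.supervised_learning.perceptron import Perceptron

data = datasets.load_digits()
X = normalize(data.data)
y = to_categorical(data.target)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, seed=1)

clf = Perceptron(n_iterations=5000,
                 activation_function=Swish,
                 loss=CrossEntropy,
                 learning_rate=0.001)
clf.fit(X_train, y_train)

y_pred = np.argmax(clf.predict(X_test), axis=1)
accuracy = accuracy_score(np.argmax(y_test, axis=1), y_pred)
print("Accuracy:", accuracy)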