From 09171587d32f480a3d9c99a0f4e6602296a07a67 Mon Sep 17 00:00:00 2001
From: saahil-mahato
Date: Thu, 3 Oct 2024 21:30:06 +0545
Subject: [PATCH] feat: add maxout activation function

---
 neural_network/activation_functions/maxout.py | 55 +++++++++++++++++++
 1 file changed, 55 insertions(+)
 create mode 100644 neural_network/activation_functions/maxout.py

diff --git a/neural_network/activation_functions/maxout.py b/neural_network/activation_functions/maxout.py
new file mode 100644
index 000000000000..329994ad1be8
--- /dev/null
+++ b/neural_network/activation_functions/maxout.py
@@ -0,0 +1,55 @@
+"""
+Maxout activation function
+
+Use Case: Maxout allows for more flexibility than traditional
+activation functions like ReLU and can improve model capacity.
+
+For more detailed information, you can refer to the following link:
+https://arxiv.org/abs/1302.4389
+"""
+
+import numpy as np
+
+
+def maxout(vector: np.ndarray) -> np.ndarray:
+    """
+    Implements the Maxout Activation Function.
+
+    Parameters:
+        vector (np.ndarray): Input of shape (batch_size, num_features),
+            where num_features must be even.
+
+    Returns:
+        np.ndarray: Output of shape (batch_size, num_features // 2), the
+            elementwise maximum of the two halves of each input row.
+
+    Formula: f(x)_i = max(x_i, x_(i + d/2)), where d = num_features
+
+    Examples:
+    >>> maxout(np.array([[2., -3.], [-1., 4.]]))
+    array([[2.],
+           [4.]])
+
+    >>> maxout(np.array([[5, -5], [3, -3]]))
+    array([[5],
+           [3]])
+
+    >>> maxout(np.array([[1., 2., 3.]]))
+    Traceback (most recent call last):
+        ...
+    ValueError: maxout requires an even number of feature columns
+
+    """
+    if vector.shape[1] % 2 != 0:
+        raise ValueError("maxout requires an even number of feature columns")
+    # Pair column i with column i + num_features // 2 and keep the
+    # larger of the two values; an odd width would otherwise broadcast
+    # silently into a wrong-shaped result.
+    return np.maximum(
+        vector[:, : vector.shape[1] // 2], vector[:, vector.shape[1] // 2 :]
+    )
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
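
Illustrative note: the helper above implements only the max reduction over
two fixed halves of the feature axis. In the maxout paper
(https://arxiv.org/abs/1302.4389), a maxout unit instead takes the maximum
over k learned affine maps, h(x) = max_j (x W_j + b_j). Below is a minimal
NumPy sketch of such a layer; every name, shape, and value in it is an
assumption made for illustration, not code from this patch or repository.

import numpy as np

rng = np.random.default_rng(seed=0)

# Hypothetical maxout layer with k learned affine maps, following
# h(x) = max_j (x @ W[j] + b[j]). All shapes here are assumptions:
# x is (batch, in_features) and the output is (batch, out_features).
in_features, out_features, k = 4, 3, 2
weights = rng.normal(size=(k, in_features, out_features))
biases = np.zeros((k, out_features))


def maxout_layer(x: np.ndarray) -> np.ndarray:
    # Stack the k affine outputs along a new leading axis, then take
    # the elementwise maximum across that axis.
    affine = np.stack([x @ weights[j] + biases[j] for j in range(k)])
    return affine.max(axis=0)


batch = rng.normal(size=(5, in_features))
print(maxout_layer(batch).shape)  # -> (5, 3)

With k = 2, the final reduction is the same np.maximum over two feature
groups that maxout() performs, so the patched function can serve as that
step when the two affine outputs are concatenated column-wise.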