From a717983198276cc8b789184a085fe680fc8d07ed Mon Sep 17 00:00:00 2001
From: Anselm Levskaya
Date: Fri, 26 May 2023 15:43:10 -0700
Subject: [PATCH] Update activation.py docstring for PReLU

Added a usage note for PReLU clarifying the need for module initialization.
---
 flax/linen/activation.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/flax/linen/activation.py b/flax/linen/activation.py
index 3c56c0e76..d68486948 100644
--- a/flax/linen/activation.py
+++ b/flax/linen/activation.py
@@ -59,6 +59,12 @@ class PReLU(Module):
   """Parametric Rectified Linear Unit (PReLU) activation function.
 
+  Note that PReLU is a Flax layer and not a simple activation function, so
+  it needs to be initialized before being called.
+
+  Example usage::
+    x = nn.PReLU()(x)
+
   Attributes:
     param_dtype: the dtype passed to parameter initializers (default: float32).
     negative_slope_init: the value to initialize the negative slope