% GELU activation function (tanh approximation), plotted over [-10, 10].
%
% Uses the standard approximation from Hendrycks & Gimpel:
%   GELU(x) ~= 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))

% Sample the input range with a fine step so the curve looks smooth.
x = -10:0.01:10;

% Argument of the tanh term, kept as a named intermediate for readability.
inner = sqrt(2/pi) * (x + 0.044715 * x.^3);

% Element-wise GELU approximation.
y = 0.5 * x .* (1 + tanh(inner));

% Plot the activation curve with labeled axes and a grid.
plot(x,y);
xlabel('x');
ylabel('y');
grid on