neuralfun.py
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 4 19:14:26 2020

@author: Oscar
"""
from numpy import log, exp, ones

# Logsig activation function -----------------------------------------
def logsig(x):
    """Logistic sigmoid, maps R -> (0, 1)."""
    return 1 / (1 + exp(-x))

def ilogsig(x):
    """Inverse of logsig (the logit); x must lie in (0, 1)."""
    return -log((1 / x) - 1)

def dlogsig(x):
    """Derivative of logsig, equal to logsig(x) * (1 - logsig(x))."""
    return exp(-x) / ((1 + exp(-x)) ** 2)

# ReLu activation function -----------------------------------------
# Note: as implemented, this is the softplus function, a smooth
# approximation of ReLU; its inverse and derivative below are
# consistent with that choice.
def relu(x):
    return log(1 + exp(x))

def irelu(x):
    # x must have values > 0, because that is the output range of the function
    return log(exp(x) - 1)

def drelu(x):
    # The derivative of softplus is the logistic function
    return 1 / (1 + exp(-x))

# Linear activation function -----------------------------------------
def linear(x):
    return x

def ilinear(x):
    # The inverse of the identity is the identity itself
    return x

def dlinear(x):
    # Constant derivative of 1 per component; assumes x is array-like
    return ones(len(x))
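
A quick way to sanity-check these activation/inverse/derivative triples is to verify that each inverse undoes its activation and that each derivative matches a central finite-difference estimate. A minimal sketch, assuming the file is importable as neuralfun and that inputs are NumPy arrays (the module path and test values here are illustrative, not part of the original file):

import numpy as np
from neuralfun import logsig, ilogsig, dlogsig, relu, irelu, drelu

x = np.array([-2.0, 0.0, 2.0])

# Each inverse should recover x from its activation's output
assert np.allclose(ilogsig(logsig(x)), x)
assert np.allclose(irelu(relu(x)), x)  # relu here is softplus, so outputs are > 0

# Each derivative should match a central finite difference
h = 1e-6
assert np.allclose(dlogsig(x), (logsig(x + h) - logsig(x - h)) / (2 * h))
assert np.allclose(drelu(x), (relu(x + h) - relu(x - h)) / (2 * h))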