-
Notifications
You must be signed in to change notification settings - Fork 0
/
neurotrans.py
122 lines (86 loc) · 2.39 KB
/
neurotrans.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#\|
#=#| Author: Danny Ly MugenKlaus|RedKlouds
#=#| File: neurotrans.py
#=#| Date: 12/8/2017
#=#|
#=#| Program Desc: An object where all transfer functions are defined;
#=#| Helper class.
#=#|
#=#| Usage: Used when implementing layers with activation functions.
#=#|
#=#| Precondition: Requires Numpy library
#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#=#\|
import numpy as np
class Compet:
    """
    Competitive transfer function.

    Produces a one-hot vector marking the neuron with the largest net
    input (the "winner"); every other entry is 0.

    Usage:
        p = [12, 3, 4, -1, 5]
        compet = Compet()
        a = compet(p)
        # a == [1, 0, 0, 0, 0]
    """
    def __call__(self, inputVector):
        """Return a one-hot array (same shape/dtype as inputVector) with
        a 1 at the index of the maximum element.

        Note: ties are broken by np.argmax, which picks the first maximum.
        """
        result = np.zeros_like(inputVector)
        # renamed from `max` to avoid shadowing the builtin
        winner = np.argmax(inputVector)  # index of the winning neuron
        result[winner] = 1.0
        return result
class PureLin:
    """
    Pure linear (identity) transfer function: a = n.

    Usage:
        p = [1, 2, 3, 4, 5]
        purelin = PureLin()
        a = purelin(p)
        # a == [1, 2, 3, 4, 5]
    """
    def __call__(self, inputVector):
        """Return an independent copy of inputVector, values unchanged."""
        output = np.copy(inputVector)
        return output
class HardLim:
    """
    Hard limit transfer function: a = 1 if n >= 0, else 0.

    Usage:
        p = [1, 2, 3, -5, -6, 0]
        hardlim = HardLim()
        a = hardlim(p)
        # a == [1, 1, 1, 0, 0, 1]
    """
    def __call__(self, inputVector):
        """Return an array (same shape/dtype as inputVector) with 1 where
        the element is non-negative and 0 elsewhere.
        """
        result = np.zeros_like(inputVector)
        # boolean-mask assignment replaces the per-element Python loop;
        # dtype is preserved exactly as the original loop did
        result[inputVector >= 0] = 1
        return result
class HardLims:
    """
    Symmetric hard limit transfer function: a = 1 if n >= 0, else -1.

    Usage:
        from neurotrans import HardLims
        p = [-1, 2, 3, 4, 0, -2]
        hardlims = HardLims()
        a = hardlims(p)
        # a == [-1, 1, 1, 1, 1, -1]
    """
    def __call__(self, inputVector):
        """Return an array (same shape/dtype as inputVector) with 1 where
        the element is non-negative and -1 elsewhere.
        """
        result = np.ones_like(inputVector)
        # boolean-mask assignment replaces the per-element Python loop
        result[inputVector < 0] = -1
        return result
class LogSig:
    """
    Log-sigmoid transfer function: a = 1 / (1 + e^-n).

    Usage:
        from neurotrans import LogSig
        p = [12, 3, 4, 5, 6]
        logsig = LogSig()
        a = logsig(p)
        # a == [0.99999386, 0.95257413, 0.98201379, 0.99330715, 0.99752738]
    """
    def __call__(self, inputvector):
        """Squash each element of inputvector into the open interval (0, 1)."""
        # np.exp allocates a fresh array, so the original defensive
        # .copy() was unnecessary; the input is never mutated.
        return 1 / (1 + np.exp(-inputvector))

    def derivative(self, inputVector):
        """Derivative of logsig expressed in terms of its OUTPUT.

        inputVector must be the activation a = logsig(n) (not the raw
        net input); the derivative is then a * (1 - a).
        """
        return inputVector * (1 - inputVector)