# single_perceptron_example.py
# Single Perceptron Example (ATD E10.01)
"""
Definition: For a single perceptron, find an assignment to the parameters w0, w1, w2 such that the
perceptron implements the boolean function y(x1, x2) = x1 ^ ¬x2 for binary variables x1
and x2. (Use the Heaviside step function φ(x) = max(sign(x), 0) as threshold function.)
"""
def sign(y):
    """Signum function: +1 for positive, -1 for negative, 0 for zero input."""
    if y > 0:
        return 1
    if y < 0:
        return -1
    return 0
def heaviside(y):
    """Heaviside step function: 1 for strictly positive input, else 0."""
    return 1 if y > 0 else 0
def learningFunction(x, t, w, n, epochs=1):
    """Train a single perceptron with the classic perceptron learning rule.

    Args:
        x: list of feature rows; x[j][i] is feature j of sample i
           (row 0 is the constant bias input, always 1).
        t: list of target outputs, one per sample.
        w: initial weights, one per feature row. The caller's list is NOT
           modified; a copy is trained and returned.
        n: learning rate.
        epochs: number of full passes over the samples (default 1, matching
            the original single-pass behavior).

    Returns:
        The trained weight list (same length as `w`).
    """
    w = list(w)  # work on a copy so the caller's weights stay untouched
    samples = len(t)
    for _ in range(epochs):
        for i in range(samples):
            # Weighted sum of all features for sample i.
            y = sum(w[j] * x[j][i] for j in range(len(w)))
            # Error is 0 when the thresholded output already matches the target.
            error = t[i] - heaviside(y)
            # Perceptron update rule: w_j += n * error * x_j
            for j in range(len(w)):
                w[j] += n * error * x[j][i]
    return w
if __name__ == "__main__":
    # Initialization (values below match the code exactly):
    #   x1: 0 0 1 1
    #   x2: 0 1 0 1
    #   t:  0 0 1 0   (targets for t = x1 AND NOT x2)
    #   learning rate: n = 0.4
    #   initial weights: w0 = 0.5, w1 = 1, w2 = -1
    x0 = [1, 1, 1, 1]   # bias inputs: always 1, multiplied by w0
    x1 = [0, 0, 1, 1]
    x2 = [0, 1, 0, 1]
    t = [0, 0, 1, 0]
    n = 0.4
    w = learningFunction([x0, x1, x2], t, [0.5, 1, -1], n)
    print("w0 = {:.2f}, w1 = {:.2f}, w2 = {:.2f}".format(w[0], w[1], w[2]))
    # Verify the trained perceptron on all four input combinations.
    for i in range(4):
        y = heaviside((w[0]) + (w[1] * x1[i]) + (w[2] * x2[i]))
        print("For X1 = {}, X2 = {} | y = {:.2f}, t = {}".format(x1[i], x2[i], y, t[i]))