Weight_calc_example_linear.py — 71 lines (56 loc) · 1.16 KB
(Scraped page header; the script itself begins below.)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import numpy as np
import matplotlib.pyplot as plt
# Training data for the target function y = 2x.
x = np.arange(1, 11)   # inputs: x = [1, 2, ..., 10]
y = x * 2.             # targets
# Gradient-descent step size.
alpha = 0.01
# Stop training once the cost J falls below this threshold.
J_CRIT = 0.01
def J_calc(a, y):
    """Return the mean-squared-error cost 0.5 * mean((a - y)^2)."""
    residual = a - y
    return 0.5 * np.mean(residual ** 2.)
# Step 1: initialise the trainable parameters.
w, b = 1, 0   # weight and bias starting guesses
g = 1         # linear (identity) activation scaling factor
J = 100       # sentinel cost; guarantees the loop runs at least once
# Record the parameter trajectory so every intermediate fit can be plotted.
w_list = [w]
b_list = [b]
# Gradient descent: repeat forward pass + parameter update until the
# cost drops below the convergence threshold J_CRIT.
while J > J_CRIT:
    # Step 2: propagate — prediction through the linear activation.
    prediction = g * (w * x + b)
    # Step 3: cost for the current parameters.
    J = J_calc(prediction, y)
    # Step 4: gradients of the MSE cost w.r.t. w and b.
    grad_w = np.mean((prediction - y) * x)   # dJ/dw
    grad_b = np.mean(prediction - y)         # dJ/db
    # Gradient-descent update, then record the new parameters.
    w = w - alpha * grad_w
    b = b - alpha * grad_b
    w_list.append(w)
    b_list.append(b)
print('w_list: ')
print(w_list)
print('b_list: ')
print(b_list)
# Re-sample x from 0 so the fitted lines span the whole plot window.
x = np.arange(0, 11, 1)
plt.clf()
# Draw every intermediate fit faintly to visualise convergence.
for weight, bias in zip(w_list, b_list):
    plt.plot(x, weight * x + bias, color='blue', alpha=0.3)
# Highlight the converged fit and overlay the training data.
plt.plot(x, w_list[-1] * x + b_list[-1], color='blue', label='final fit')
plt.scatter(x, 2 * x, color='red', label='data')
plt.grid(alpha=0.3)
plt.xlabel('x')
plt.ylabel('y')
plt.xlim(0, 10)
plt.ylim(0, 20)
plt.legend()
plt.show()