-
Notifications
You must be signed in to change notification settings - Fork 9
/
iris_linreg_np.py
77 lines (57 loc) · 1.68 KB
/
iris_linreg_np.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
"""
Linear regression with gradient descent (NumPy).
"""
import numpy as np
import matplotlib.pyplot as plt
# Load the data
# Load the two-column iris data file; skiprows=1 drops the header row.
data = np.loadtxt('datasets/iris/iris.txt', skiprows=1)
x_data = data[:,0]  # column 0: sepal length (cm), per the figure's axis labels
y_data = data[:,1]  # column 1: petal length (cm)
#-------------------------------------------------------------------------------
# Fit
#-------------------------------------------------------------------------------
def predict(x, W, b):
    """Evaluate the straight-line model at x.

    W is the slope and b the intercept; x may be a scalar or a NumPy
    array, in which case the result broadcasts elementwise.
    """
    slope_term = W * x
    return slope_term + b
def compute_loss(x, W, b, y):
    """Return the half mean-squared-error of the linear fit W*x + b
    against the targets y."""
    residual = (W * x + b) - y
    return 0.5 * np.mean(residual ** 2)
def compute_gradients(x, W, b, y):
    """Return (dL/dW, dL/db) for the half-MSE loss of the model W*x + b
    fit to targets y, averaged over all samples."""
    residual = (W * x + b) - y
    return np.mean(residual * x), np.mean(residual)
# Hyperparameters
learning_rate = 0.01
num_epochs = 10000

# Initial parameter guesses for the gradient descent
W = 0.0
b = 0.0

# Minimize the half-MSE loss by plain (full-batch) gradient descent,
# logging the loss every 1000 epochs.
for epoch in range(num_epochs):
    dLdW, dLdb = compute_gradients(x_data, W, b, y_data)
    W = W - learning_rate * dLdW
    b = b - learning_rate * dLdb
    if (epoch + 1) % 1000 == 0:
        loss = compute_loss(x_data, W, b, y_data)
        print("After {} epochs, loss = {}".format(epoch + 1, loss))

# Report the fitted slope and intercept
print("W =", W)
print("b =", b)
#-------------------------------------------------------------------------------
# Figure
#-------------------------------------------------------------------------------
# Plot the raw observations
plt.plot(x_data, y_data, 'o', label='Data')
# Overlay the fitted line across the observed x range
x_fit = np.linspace(x_data.min(), x_data.max())
y_fit = predict(x_fit, W, b)
plt.plot(x_fit, y_fit, label='Fit')
# Legend
plt.legend()
# Axis labels
plt.xlabel("Sepal length (cm)")
plt.ylabel("Petal length (cm)")  # fixed typo: "legnth" -> "length"
# Save figure (assumes the figs/ directory already exists)
plt.savefig('figs/iris_linreg_np.png')