forked from lazyprogrammer/machine_learning_examples
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: forwardprop.py
60 lines (47 loc) · 1.53 KB
/
forwardprop.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# forward propagation example for deep learning in python class.
#
# the notes for this class can be found at:
# https://deeplearningcourses.com/c/data-science-deep-learning-in-python
# https://www.udemy.com/data-science-deep-learning-in-python
import numpy as np
import matplotlib.pyplot as plt
# Build a toy 3-class dataset: three 2-D Gaussian clouds of Nclass points
# each, centered at (0, -2), (2, 2) and (-2, 2).
Nclass = 500
centers = [(0, -2), (2, 2), (-2, 2)]
clouds = [np.random.randn(Nclass, 2) + np.array(c) for c in centers]
X1, X2, X3 = clouds
X = np.concatenate(clouds, axis=0)
# labels: rows 0..Nclass-1 are class 0, the next Nclass are class 1, etc.
Y = np.repeat(np.arange(3), Nclass)
# visualize the clouds, colored by class label
plt.scatter(X[:, 0], X[:, 1], c=Y, s=100, alpha=0.5)
plt.show()
# Randomly initialize the parameters of a two-layer network:
# input (D) -> hidden (M) -> output (K).
D = 2  # dimensionality of input
M = 3  # hidden layer size
K = 3  # number of classes
# layer 1: D x M weight matrix plus M hidden-unit biases
W1, b1 = np.random.randn(D, M), np.random.randn(M)
# layer 2: M x K weight matrix plus K output biases
W2, b2 = np.random.randn(M, K), np.random.randn(K)
def sigmoid(a):
    """Elementwise logistic sigmoid of *a*: maps any real into (0, 1)."""
    denom = 1 + np.exp(-a)
    return 1 / denom
def forward(X, W1, b1, W2, b2):
    """One forward pass through the 2-layer network.

    X: (N, D) input data.
    W1, b1: input->hidden weights (D, M) and biases (M,).
    W2, b2: hidden->output weights (M, K) and biases (K,).
    Returns the (N, K) matrix of softmax class probabilities (rows sum to 1).
    """
    # hidden layer: affine transform followed by sigmoid nonlinearity
    Z = sigmoid(X.dot(W1) + b1)
    # output layer activations (logits)
    A = Z.dot(W2) + b2
    # softmax with the max-subtraction trick: subtracting the rowwise max is
    # mathematically a no-op (it cancels in the ratio) but keeps np.exp from
    # overflowing when activations are large
    expA = np.exp(A - A.max(axis=1, keepdims=True))
    return expA / expA.sum(axis=1, keepdims=True)
# determine the classification rate
# num correct / num total
def classification_rate(Y, P):
    """Return the fraction of predictions P that match the targets Y.

    Y: array-like of true class labels.
    P: array-like of predicted class labels, same length as Y.
    Returns a float in [0, 1].
    """
    # Vectorized replacement for the manual counting loop; this also drops
    # the Python-2-only `xrange`, so the function runs under Python 3.
    return np.mean(np.asarray(Y) == np.asarray(P))
# Run the untrained network on the data and pick the most likely class per row.
P_Y_given_X = forward(X, W1, b1, W2, b2)
P = np.argmax(P_Y_given_X, axis=1)
# verify we chose the correct axis
assert(len(P) == len(Y))
# Expect roughly 1/3 accuracy: the weights are random, so predictions are
# chance-level over 3 balanced classes. Formatting into a single string makes
# this print() valid under both Python 2 and Python 3 (the original
# `print x, y` statement is a SyntaxError in Python 3).
print("Classification rate for randomly chosen weights: %s" % classification_rate(Y, P))