Commit 11e6b44

tiny update
1 parent 86044b2 commit 11e6b44

1 file changed (+3, -5 lines)
logistic_regression_class/logistic_donut.py

@@ -36,9 +36,7 @@
 ones = np.ones((N, 1))
 
 # add a column of r = sqrt(x^2 + y^2)
-r = np.zeros((N,1))
-for i in xrange(N):
-    r[i] = np.sqrt(X[i,:].dot(X[i,]))
+r = np.sqrt( (X * X).sum(axis=1) ).reshape(-1, 1)
 Xb = np.concatenate((ones, r, X), axis=1)
 
 # randomly initialize the weights
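
The removed loop and the new one-liner compute the same thing: the Euclidean norm of each row of X, as an N x 1 column. A minimal sketch of the equivalence (Python 3, using range instead of the script's xrange; N and the data here are made up):

import numpy as np

N = 5
X = np.random.randn(N, 2)

# loop version removed by this commit
r_loop = np.zeros((N, 1))
for i in range(N):
    r_loop[i] = np.sqrt(X[i, :].dot(X[i, :]))

# vectorized version added by this commit
r_vec = np.sqrt((X * X).sum(axis=1)).reshape(-1, 1)

assert np.allclose(r_loop, r_vec)

The reshape(-1, 1) keeps r as a column so it can be concatenated with ones and X on axis=1.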
@@ -62,7 +60,7 @@ def cross_entropy(T, Y):
     # else:
     #     E -= np.log(1 - Y[i])
     # return E
-    return (T*np.log(Y) + (1-T)*np.log(1-Y)).sum()
+    return -(T*np.log(Y) + (1-T)*np.log(1-Y)).sum()
 
 
 # let's do gradient descent 100 times
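
This change adds the missing leading minus sign, so cross_entropy returns the positive negative log-likelihood that the commented-out loop above accumulates with E -= np.log(...). Before the fix the function returned the log-likelihood itself, which is negative, so the printed "error" values would have had the wrong sign. A quick sanity check of the corrected formula (Python 3; T and Y are made-up targets and predictions):

import numpy as np

def cross_entropy(T, Y):
    # negative log-likelihood of binary targets T under predicted probabilities Y
    return -(T*np.log(Y) + (1-T)*np.log(1-Y)).sum()

T = np.array([1, 0, 1])
Y = np.array([0.9, 0.2, 0.8])
print(cross_entropy(T, Y))  # ~0.55: positive, and smaller for better predictions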
@@ -71,7 +69,7 @@ def cross_entropy(T, Y):
 for i in xrange(5000):
     e = cross_entropy(T, Y)
     error.append(e)
-    if i % 100 == 0:
+    if i % 500 == 0:
         print e
 
     # gradient descent weight update with regularization
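
The diff ends at that context comment, so the regularized update itself is not shown in this commit. A plausible sketch of what such a step looks like for logistic regression with an L2 penalty (the learning rate, penalty strength, and stand-in data below are assumptions, not taken from the commit):

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

# made-up stand-ins for the script's Xb, T, and w
N, D = 100, 4
Xb = np.random.randn(N, D)
T = np.random.randint(0, 2, N)
w = np.random.randn(D) / np.sqrt(D)

learning_rate = 0.0001  # assumed
l2 = 0.01               # assumed L2 penalty strength

for i in range(5000):
    Y = sigmoid(Xb.dot(w))
    # ascend the log-likelihood minus (l2/2) * w.dot(w),
    # i.e. descend the regularized cross-entropy
    w += learning_rate * (Xb.T.dot(T - Y) - l2 * w)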
