# NOTE(review): the lines below are GitHub web-page chrome captured when this
# file was scraped; commented out so the file parses as Python.
# Skip to content
# Permalink
# Branch: master
# Find file Copy path
# Find file Copy path
# Fetching contributors…
# Cannot retrieve contributors at this time
# 78 lines (59 sloc) 1.55 KB
import numpy as np
from sklearn.datasets import load_boston
from sklearn.utils import shuffle, resample
from ops.input import Input
from ops.linear_algebra.linear import Linear
from ops.activations.sigmoid import Sigmoid
from cost_functions.mse import MSE
from graph.graph import Graph
from optimizers.sgd import SGD
# Load the Boston housing dataset.
# NOTE(review): sklearn.datasets.load_boston was removed in scikit-learn 1.2;
# this script presumably targets an older sklearn — confirm the pinned version.
data = load_boston()
X_, y_ = data['data'], data['target']

# Standardize every feature column to zero mean and unit variance.
X_ = (X_ - X_.mean(axis=0)) / X_.std(axis=0)

n_features = X_.shape[1]
n_hidden = 10

# Randomly initialize the weight matrices; biases start at zero.
W1_ = np.random.randn(n_features, n_hidden)
b1_ = np.zeros(n_hidden)
W2_ = np.random.randn(n_hidden, 1)
b2_ = np.zeros(1)
# Build the computation graph: a 1-hidden-layer network
# (Linear -> Sigmoid -> Linear) trained with an MSE cost.
X, y = Input(), Input()
W1, b1 = Input(), Input()
W2, b2 = Input(), Input()

hidden = Linear(X, W1, b1)
activated = Sigmoid(hidden)
prediction = Linear(activated, W2, b2)
cost = MSE(y, prediction)

# Map each Input placeholder to its initial numpy value.
feed_dict = dict(zip(
    (X, y, W1, b1, W2, b2),
    (X_, y_, W1_, b1_, W2_, b2_),
))
epochs = 20
# Total number of training examples.
m = X_.shape[0]
batch_size = 11
steps_per_epoch = m // batch_size

graph = Graph(feed_dict)
sgd = SGD(1e-2)
# Parameters the optimizer is allowed to update.
trainables = [W1, b1, W2, b2]

print("Total number of examples = {}".format(m))

for epoch in range(epochs):
    total_loss = 0
    for _ in range(steps_per_epoch):
        # Draw a random mini-batch (sampling with replacement).
        X_batch, y_batch = resample(X_, y_, n_samples=batch_size)

        # Point the X and y Input nodes at this batch.
        X.output = X_batch
        y.output = y_batch

        # Backprop through the graph, then take one SGD step.
        graph.compute_gradients()
        sgd.update(trainables)
        total_loss += graph.loss()

    # Report the mean batch loss for this epoch.
    print("Epoch: {}, Loss: {:.3f}".format(epoch + 1, total_loss / steps_per_epoch))
# You can’t perform that action at this time.  (GitHub chrome — not code.)