Skip to content

Commit

Permalink
expanded forward propagation tutorial
Browse files Browse the repository at this point in the history
  • Loading branch information
bfortuner committed Apr 24, 2017
1 parent 23b67cd commit a2ddc6b
Show file tree
Hide file tree
Showing 16 changed files with 229 additions and 47 deletions.
76 changes: 40 additions & 36 deletions code/nn_matrix.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,49 +3,33 @@

# Neural Network w Matrices

**
X - Input matrix from training set
Zh - Hidden layer weighted input matrix
Zo - Output layer weighted input matrix
Bh - Hidden layer bias matrix
Bo - Output layer bias matrix
H - Hidden layer activation matrix
yHat - Output layer predictions
**

# Initialize Weights
Wh = np.random.randn(inputLayerSize, hiddenLayerSize) * \
np.sqrt(2.0/inputLayerSize)
Wo = np.random.randn(hiddenLayerSize, outputLayerSize) * \
np.sqrt(2.0/hiddenLayerSize)

# Initialize Biases
Bh = np.full((1, hiddenLayerSize), 0.1)
Bo = np.full((1, outputLayerSize), 0.1)
INPUT_LAYER_SIZE = 1
HIDDEN_LAYER_SIZE = 2
OUTPUT_LAYER_SIZE = 2

def init_weights():
    '''
    Initialize the network weight matrices with He-style scaling
    (randn * sqrt(2/fan_in)), which suits ReLU activations.

    Returns:
        (Wh, Wo) - hidden and output layer weight matrices.

    BUG FIX: the original computed Wh and Wo and then discarded them;
    now returns them, mirroring init_bias().
    '''
    Wh = np.random.randn(INPUT_LAYER_SIZE, HIDDEN_LAYER_SIZE) * \
        np.sqrt(2.0/INPUT_LAYER_SIZE)
    Wo = np.random.randn(HIDDEN_LAYER_SIZE, OUTPUT_LAYER_SIZE) * \
        np.sqrt(2.0/HIDDEN_LAYER_SIZE)
    return Wh, Wo

def relu(Z):
    # ReLU activation: element-wise max(0, z) — negatives become 0,
    # positives pass through unchanged.
    return np.maximum(0, Z)

def feed_forward(X):

# Hidden layer
Zh = np.dot(X, Wh) + Bh
H = relu(Zh)
def init_bias():
    '''
    Build the bias row vectors for the hidden and output layers,
    each entry starting at the small positive constant 0.1.

    Returns:
        (Bh, Bo) - hidden and output layer bias matrices.
    '''
    hidden_bias = np.full((1, HIDDEN_LAYER_SIZE), 0.1)
    output_bias = np.full((1, OUTPUT_LAYER_SIZE), 0.1)
    return hidden_bias, output_bias

# Output layer
Zo = np.dot(H, Wo) + Bo
yHat = relu(Zo)
return yHat
def relu(Z):
    '''Rectified linear unit, applied element-wise: clip negatives to 0.'''
    return np.maximum(Z, 0)

def relu_prime(Z):
    '''
    Z - weighted input matrix

    Returns gradient of Z where all
    negative values are set to 0 and
    all positive values set to 1.

    NOTE: mutates Z in place and returns it; entries exactly equal
    to 0 are left as 0 (the conventional subgradient choice).
    '''
    # FIX: removed stray '**' delimiter lines and the duplicated old
    # docstring text that made this block invalid Python.
    Z[Z < 0] = 0
    Z[Z > 0] = 1
    return Z
Expand All @@ -57,6 +41,24 @@ def cost(yHat, y):
def cost_prime(yHat, y):
    '''Gradient of the cost w.r.t. the predictions: the residual yHat - y.'''
    residual = yHat - y
    return residual

def feed_forward(X):
    '''
    Push the input matrix X through the network and return yHat,
    the output layer predictions.

    Relies on the module-level weight/bias matrices Wh, Bh, Wo, Bo.
    '''

    # Hidden layer: linear combination plus bias, then ReLU.
    hidden_z = np.dot(X, Wh) + Bh
    hidden_activation = relu(hidden_z)

    # Output layer: same pattern, fed by the hidden activations.
    output_z = np.dot(hidden_activation, Wo) + Bo
    return relu(output_z)

def backprop(X, y, lr):

yHat = feed_forward(X)
Expand All @@ -80,3 +82,5 @@ def backprop(X, y, lr):
# Update biases
Bo -= lr * dBo
Bh -= lr * dBh


13 changes: 13 additions & 0 deletions docs/_static/theme_overrides.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
/* override table width restrictions */
@media screen and (min-width: 767px) {

    .wy-table-responsive table td {
        /* !important prevents the common CSS stylesheets from overriding
           this as on RTD they are loaded after this stylesheet */
        white-space: normal !important;
    }

    /* let wide tables remain visible instead of being clipped */
    .wy-table-responsive {
        overflow: visible !important;
    }
}
8 changes: 8 additions & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,14 @@
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# Paths (relative to this file) that contain custom static files,
# copied into the built docs' _static directory.
html_static_path = ['_static']

# Extra context handed to the HTML templates; css_files is picked up
# by the RTD theme to load additional stylesheets.
html_context = {
    'css_files': [
        '_static/theme_overrides.css',  # override wide tables in RTD theme
    ],
}

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

Expand Down
Binary file not shown.
File renamed without changes.
1 change: 1 addition & 0 deletions docs/figures/forward_prop_matrix_dimensions_table.tgn
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"rows_views":[[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_c
olor":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"middle","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"lef
t":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}],[{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","
valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}},{"style":{"borders":"lrtb","font_style":{},"text_color":"","bg_color":"","halign":"left","valign":"top","padding":{"top":10,"right":5,"bottom":10,"left":5}}}]],"model":{"rows":[[{"value":"**Var**","cspan":1,"rspan":1},{"value":"**Name**","cspan":1,"rspan":1},{"value":"**Dimensions**","cspan":1,"rspan":1},{"value":"**Explanation**","cspan":1,"rspan":1}],[{"value":"``X``","cspan":1,"rspan":1},{"value":"Input","cspan":1,"rspan":1},{"value":"(3, 1)","cspan":1,"rspan":1},{"value":"Includes 3 rows of training data, and each row has 1 attribute (height, price, etc.)","cspan":1,"rspan":1}],[{"value":"``Wh``","cspan":1,"rspan":1},{"value":"Hidden weights","cspan":1,"rspan":1},{"value":"(1, 2)","cspan":1,"rspan":1},{"value":"These dimensions are based on number of rows equals the number of attributes for the observations in our training set. The number columns equals the number of neurons in the hidden layer. The dimensions of the weights matrix between two layers is determined by the sizes of the two layers it connects. There is one weight for every input-to-neuron connection between the layers.","cspan":1,"rspan":1}],[{"value":"``Bh``","cspan":1,"rspan":1},{"value":"Hidden bias","cspan":1,"rspan":1},{"value":"(1, 2)","cspan":1,"rspan":1},{"value":"Each neuron in the hidden layer has is own bias constant. This bias matrix is added to the weighted input matrix before the hidden layer applies ReLU.","cspan":1,"rspan":1}],[{"value":"``Zh``","cspan":1,"rspan":1},{"value":"Hidden weighted input","cspan":1,"rspan":1},{"value":"(1, 2)","cspan":1,"rspan":1},{"value":"Computed by taking the dot product of X and Wh. The dimensions (1,2) are required by the rules of matrix multiplication. 
Zh takes the rows of in the inputs matrix and the columns of weights matrix. We then add the hidden layer bias matrix Bh.","cspan":1,"rspan":1}],[{"value":"``H``","cspan":1,"rspan":1},{"value":"Hidden activations","cspan":1,"rspan":1},{"value":"(3, 2)","cspan":1,"rspan":1},{"value":"Computed by applying the Relu function to Zh. The dimensions are (3,2) — the number of rows matches the number of training samples and the number of columns equals the number of neurons. Each column holds all the activations for a specific neuron.","cspan":1,"rspan":1}],[{"value":"``Wo``","cspan":1,"rspan":1},{"value":"Output weights","cspan":1,"rspan":1},{"value":"(2, 2)","cspan":1,"rspan":1},{"value":"The number of rows matches the number of hidden layer neurons and the number of columns equals the number of output layer neurons. There is one weight for every hidden-neuron-to-output-neuron connection between the layers.","cspan":1,"rspan":1}],[{"value":"``Bo``","cspan":1,"rspan":1},{"value":"Output bias","cspan":1,"rspan":1},{"value":"(1, 2)","cspan":1,"rspan":1},{"value":"There is one column for every neuron in the output layer.","cspan":1,"rspan":1}],[{"value":"``Zo``","cspan":1,"rspan":1},{"value":"Output weighted input","cspan":1,"rspan":1},{"value":"(3, 2)","cspan":1,"rspan":1},{"value":"Computed by taking the dot product of H and Wo and then adding the output layer bias Bo. The dimensions are (3,2) representing the rows of in the hidden layer matrix and the columns of output layer weights matrix.","cspan":1,"rspan":1}],[{"value":"``O``","cspan":1,"rspan":1},{"value":"Output activations","cspan":1,"rspan":1},{"value":"(3, 2)","cspan":1,"rspan":1},{"value":"Each row represents a prediction for a single observation in our training set. Each column is a unique attribute we want to predict. Examples of two-column output predictions could be a company's sales and units sold, or a person's height and weight.","cspan":1,"rspan":1}]]},"theme":null,"fixed_layout":false}
File renamed without changes.
File renamed without changes.

0 comments on commit a2ddc6b

Please sign in to comment.