
Commit

bug fix
yusugomori committed Oct 19, 2015
1 parent 03efd4a commit 5c9cfe0
Showing 2 changed files with 8 additions and 4 deletions.
7 changes: 4 additions & 3 deletions python/Dropout.py
@@ -87,10 +87,11 @@ def train(self, epochs=5000, dropout=True, p_dropout=0.5, rng=None):
             else:
                 prev_layer = self.hidden_layers[i+1]
 
-            self.hidden_layers[i].backward(prev_layer=prev_layer)
-
             if dropout == True:
-                self.hidden_layers[i].d_y *= dropout_masks[i] # also mask here
+                self.hidden_layers[i].backward(prev_layer=prev_layer, dropout=True, mask=dropout_masks[i])
+            else:
+                self.hidden_layers[i].backward(prev_layer=prev_layer)
+
 
 
     def predict(self, x, dropout=True, p_dropout=0.5):
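Note on the Dropout.py change: before this commit, train() called backward() first and masked self.d_y only afterwards, so the W and b update inside backward() used the unmasked gradient and units dropped in the forward pass still received weight updates; only the gradient propagated to the next layer was masked. Passing the mask into backward() applies it before the update. A minimal numpy sketch of why the ordering matters (standalone, hypothetical values — not code from the repository):

    import numpy

    d_y = numpy.array([[0.5, 0.3]])   # gradient for a 2-unit layer
    mask = numpy.array([[1.0, 0.0]])  # second unit was dropped in the forward pass
    x = numpy.array([[1.0, 2.0]])     # layer input

    d_y *= mask                       # the fix: mask before the update
    grad_W = numpy.dot(x.T, d_y)      # outer product of input and masked gradient
    print(grad_W)                     # the dropped unit's column is all zeros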
5 changes: 4 additions & 1 deletion python/HiddenLayer.py
@@ -57,12 +57,15 @@ def forward(self, input=None):
         return self.output(input=input)
 
 
-    def backward(self, prev_layer, lr=0.1, input=None):
+    def backward(self, prev_layer, lr=0.1, input=None, dropout=False, mask=None):
         if input is not None:
             self.x = input
 
         d_y = self.dactivation(prev_layer.x) * numpy.dot(prev_layer.d_y, prev_layer.W.T)
 
+        if dropout == True:
+            d_y *= mask
+
         self.W += lr * numpy.dot(self.x.T, d_y)
         self.b += lr * numpy.mean(d_y, axis=0)
         self.d_y = d_y
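For reference, the patched method restated with comments — a sketch of backward() as it stands after this commit, assuming numpy is imported in HiddenLayer.py and self.dactivation is the derivative of the layer's activation, as in the rest of the class:

    def backward(self, prev_layer, lr=0.1, input=None, dropout=False, mask=None):
        if input is not None:
            self.x = input

        # pull the next layer's gradient back through its weights,
        # then through this layer's activation derivative
        d_y = self.dactivation(prev_layer.x) * numpy.dot(prev_layer.d_y, prev_layer.W.T)

        # the fix: mask the gradient before the parameter update,
        # so dropped units receive no update at all
        if dropout == True:
            d_y *= mask

        self.W += lr * numpy.dot(self.x.T, d_y)
        self.b += lr * numpy.mean(d_y, axis=0)
        self.d_y = d_y  # cached for the next backward() call down the stack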
