Permalink
Browse files

scale activations before doing dropout

  • Loading branch information...
rohan-varma committed Jan 17, 2018
1 parent 9230159 commit 1040f5f091a38e369e933fde6d72f7f49e84b049
Showing with 6 additions and 1 deletion.
  1. +6 −1 NeuralNetwork.py
@@ -116,11 +116,16 @@ def add_bias_unit(self, X, column=True):

return bias_added

def compute_dropout(self, activations, dropout_prob = 0.5):
    """Apply inverted dropout to a layer's activations, in place.

    Each unit is kept with probability ``dropout_prob``; surviving
    activations are scaled by ``1 / dropout_prob`` so the expected
    activation magnitude is unchanged at test time
    (see http://cs231n.github.io/neural-networks-2/).

    Params: activations - numpy float array of activations (modified in place)
            dropout_prob - probability of *keeping* each unit; values
                           outside (0, 1] fall back to the default 0.5
    Return: the (same) activations array with dropped units set to zero
    """
    # Best-effort guard (matches original behavior of silently defaulting).
    # dropout_prob == 0 is also rejected: it would divide by zero below.
    if dropout_prob <= 0 or dropout_prob > 1:
        dropout_prob = 0.5
    # Inverted-dropout scaling: divide survivors by the keep probability.
    activations /= dropout_prob
    # BUG FIX: the keep mask must use dropout_prob, not a hard-coded 0.5 —
    # otherwise the parameter has no effect on which units are dropped.
    mult = np.random.binomial(1, dropout_prob, size = activations.shape)
    activations *= mult
    return activations

0 comments on commit 1040f5f

Please sign in to comment.