first yadll commit
pchavanne committed Jul 27, 2016
1 parent 749e6d5 commit 7cc4029
Showing 2 changed files with 7 additions and 7 deletions.
tests/test_layers.py (3 additions, 3 deletions)
```diff
@@ -251,9 +251,9 @@ def layer_c0(self, dropout, input_layer):
     def layer_c1(self, dropout, input_layer):
         return dropout(input_layer, corruption_level=1)
 
-    def test_get_output(self, layer, layer_c0, layer_c1):
-        np.testing.assert_array_equal(layer.get_output().eval(), layer_c0.get_output().eval())
-        # assert np.all(layer_c1.get_output().eval() == 0)
+    def test_get_output(self, input_layer, layer, layer_c0, layer_c1):
+        np.testing.assert_array_equal(input_layer.get_output().eval(), layer_c0.get_output().eval())
+        assert np.all(layer_c1.get_output().eval() == 0)
 
 
 class TestDropConnect:
```
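The test previously compared the dropout layer's output against its corruption_level=0 variant and kept the full-corruption assertion commented out; with the guard fixed in yadll/layers.py below, the identity baseline is now the raw input_layer and the all-zeros assertion is enabled. As a minimal NumPy-only sketch of the invariant under test (the `dropout` helper below is a hypothetical stand-in, not yadll's Dropout layer): corruption_level=0 must be the identity, and corruption_level=1 must zero every element.

```python
# Hypothetical NumPy sketch of the invariant the test checks; not yadll's API.
# As in yadll, p = 1 - corruption_level is the keep probability.
import numpy as np

def dropout(x, corruption_level, rng=np.random.default_rng(0)):
    p = 1 - corruption_level
    if p != 1:  # the fixed guard: skip masking only in the no-op case
        x = x * rng.binomial(1, p, size=x.shape)
    return x

x = np.ones((4, 5))
np.testing.assert_array_equal(dropout(x, corruption_level=0), x)  # identity
assert np.all(dropout(x, corruption_level=1) == 0)                # all zeros
```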
yadll/layers.py (4 additions, 4 deletions)
```diff
@@ -254,9 +254,9 @@ def __init__(self, incoming, corruption_level=0.5, **kwargs):
         super(Dropout, self).__init__(incoming, **kwargs)
         self.p = 1 - corruption_level
 
-    def get_output(self, stochastic=False, **kwargs):
+    def get_output(self, stochastic=True, **kwargs):
         X = self.input_layer.get_output(stochastic=stochastic, **kwargs)
-        if self.p > 0 and stochastic:
+        if self.p != 1 and stochastic:
             X = X * T_rng.binomial(self.input_shape, n=1, p=self.p, dtype=floatX)
         return X
 
```

```diff
@@ -269,9 +269,9 @@ def __init__(self, incoming, nb_units, corruption_level=0.5, **kwargs):
         super(Dropconnect, self).__init__(incoming, nb_units, **kwargs)
         self.p = 1 - corruption_level
 
-    def get_output(self, stochastic=False, **kwargs):
+    def get_output(self, stochastic=True, **kwargs):
         X = self.input_layer.get_output(stochastic=stochastic, **kwargs)
-        if self.p > 0 and stochastic:
+        if self.p != 1 and stochastic:
             self.W = self.W * T_rng.binomial(self.shape, n=1, p=self.p, dtype=floatX)
         return self.activation(T.dot(X, self.W) + self.b)
 
```

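Both layers get the same two fixes. First, `get_output` now defaults to `stochastic=True`, so the corruption mask is applied unless the caller explicitly requests the deterministic path. Second, since `self.p = 1 - corruption_level` is the keep probability, the old guard `self.p > 0` skipped masking exactly when p == 0 (corruption_level=1), which is why the all-zeros test assertion had to stay commented out; the new guard `self.p != 1` skips masking only in the no-op case. Below is a standalone sketch of the fixed Dropout path, assuming classic Theano is installed; names mirror the diff, but this is not the yadll layer itself.

```python
# Standalone sketch of the fixed Dropout.get_output logic (assumes Theano).
import numpy as np
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

floatX = theano.config.floatX
T_rng = RandomStreams(seed=42)

def dropout_get_output(X, p, input_shape, stochastic=True):
    # Fixed guard: mask whenever corruption was requested (p != 1);
    # with p == 0 the binomial mask is all zeros, so the output is zeroed.
    if p != 1 and stochastic:
        X = X * T_rng.binomial(input_shape, n=1, p=p, dtype=floatX)
    return X

x = T.constant(np.ones((2, 3), dtype=floatX))
print(dropout_get_output(x, p=0, input_shape=(2, 3)).eval())  # all zeros
print(dropout_get_output(x, p=1, input_shape=(2, 3)).eval())  # unchanged ones
```

Judging only from this diff, callers that want the deterministic (prediction-time) output must now pass `stochastic=False` explicitly.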
