Skip to content
This repository has been archived by the owner on Jul 10, 2021. It is now read-only.

Commit

Permalink
Adding tests for coverage of missing MLP functionality, AE officially disabled.
Browse files Browse the repository at this point in the history
  • Loading branch information
alexjc committed Nov 17, 2015
1 parent 315460b commit 674e8e3
Show file tree
Hide file tree
Showing 6 changed files with 25 additions and 6 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ install:
- pip install -q argparse coveralls lasagne

script:
- echo -e "[run]\nomit=\n sknn/backend/pylearn2/pywrap2.py\n sknn/backend/deepy/*\n sknn/backend/__init__.py*\n" > .coveragerc
- echo -e "[run]\nomit=\n *ae.py*\n sknn/backend/deepy/*\n sknn/backend/__init__.py*\n" > .coveragerc
- nosetests --with-coverage --cover-package=sknn -v sknn.tests

after_success:
Expand Down
8 changes: 5 additions & 3 deletions sknn/backend/lasagne/mlp.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,11 @@ def _initialize_impl(self, X, y=None):
test_size=self.valid_size,
random_state=self.random_state)
self.valid_set = X_v, y_v

if self.valid_set and self.is_convolution:
X_v, y_v = self.valid_set
if X_v.shape[-2:] != X.shape[-2:]:
self.valid_set = numpy.transpose(X_v, (0, 3, 1, 2)), y_v

params = []
for spec, mlp_layer in zip(self.layers, self.mlp):
Expand All @@ -204,9 +209,6 @@ def _initialize_impl(self, X, y=None):
return X, y

def _predict_impl(self, X):
# NOTE(review): this is a rendered diff with +/- markers stripped. The commit
# stats ("5 additions and 3 deletions") suggest the two lines below were
# DELETED by this commit (prediction no longer auto-initializes the network);
# they are kept byte-identical here — confirm against the repository history.
if not self.is_initialized:
self._initialize_impl(X)

# Convolution path: reorder axes before the compiled forward function,
# presumably channels-last (N, H, W, C) -> channels-first (N, C, H, W)
# as the backend expects — TODO confirm against the lasagne backend docs.
if self.is_convolution:
X = numpy.transpose(X, (0, 3, 1, 2))
# self.f is the compiled prediction function built during initialization.
return self.f(X)
Expand Down
3 changes: 3 additions & 0 deletions sknn/tests/test_classifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@ class TestClassifierFunctionality(unittest.TestCase):
def setUp(self):
    """Build a minimal one-layer softmax classifier so each test runs fast.

    NOTE(review): indentation was destroyed by page extraction; this
    restores valid Python structure without changing any token of code.
    """
    self.nn = MLPC(layers=[L("Softmax")], n_iter=1)

def test_IsClassifier(self):
    """A classifier network must report ``is_classifier`` as True.

    Counterpart of ``test_IsNotClassifier`` in the regressor test suite.
    """
    assert_true(self.nn.is_classifier)

def test_FitAutoInitialize(self):
a_in, a_out = numpy.zeros((8,16)), numpy.random.randint(0, 5, (8,))
self.nn.fit(a_in, a_out)
Expand Down
13 changes: 12 additions & 1 deletion sknn/tests/test_conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,14 +65,25 @@ def test_HorizontalKernel(self):
L("Linear")],
n_iter=1))

def test_ValidationSize(self):
    """Training with ``valid_size=0.5`` must split data for validation.

    NOTE(review): the stripped diff rendered both the pre-rename def line
    (``test_ValidationSet``) and the post-rename one back-to-back; only the
    renamed definition is kept here.
    """
    self._run(MLPR(
        layers=[
            C("Tanh", channels=4, kernel_shape=(3,3)),
            L("Linear")],
        n_iter=1,
        valid_size=0.5))

def test_ValidationSet(self):
    """An explicitly supplied ``(input, output)`` validation pair is accepted.

    Exercises the new ``valid_set`` handling for convolution networks added
    by this commit (the backend transposes the validation inputs too).
    """
    # Channels-last sample batch: 8 items of 32x16 with a single channel.
    v_in = numpy.zeros((8,32,16,1))
    v_out = numpy.zeros((8,4))

    self._run(MLPR(
        layers=[
            C("Tanh", channels=4, kernel_shape=(3,3)),
            L("Linear")],
        n_iter=1,
        valid_set=(v_in, v_out)))

def test_MultipleLayers(self):
self._run(MLPR(
layers=[
Expand Down
3 changes: 3 additions & 0 deletions sknn/tests/test_linear.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ def setUp(self):

def test_LifeCycle(self):
    """The network object built in ``setUp`` can be destroyed without error."""
    del self.nn

def test_IsNotClassifier(self):
    """A regressor network must report ``is_classifier`` as False.

    Counterpart of ``test_IsClassifier`` in the classifier test suite.
    """
    assert_false(self.nn.is_classifier)

def test_PredictNoOutputUnitsAssertion(self):
a_in = numpy.zeros((8,16))
Expand Down
2 changes: 1 addition & 1 deletion sknn/tests/test_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def test_MeanSquaredErrorLinear(self):
self._run(nn)

@unittest.skipIf(sknn.backend.name != 'lasagne', 'only lasagne')
def test_CategoricalCrossEntropyLinear(self):
    """``loss_type='mcc'`` (categorical cross-entropy) trains a softmax net.

    NOTE(review): the stripped diff rendered both the old duplicated name
    (``test_MeanSquaredErrorLinear``) and this renamed def line back-to-back;
    only the renamed definition is kept. The rename also fixes the original
    copy-paste duplication that shadowed the real mean-squared-error test.
    """
    nn = MLPR(layers=[L("Softmax")], loss_type='mcc', n_iter=1)
    self._run(nn)

Expand Down

0 comments on commit 674e8e3

Please sign in to comment.