Restore testability
__Solution__: When running tests from the command line, __always__ run the tests as a module with `-m unittest`, and use the verbose (`-v`) flag to see exactly where Python is breaking.
* Then you'll be able to track down the problem.
* Do this a few times, and it will become reflex.
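
For example, from the repository root:

```sh
python -m unittest discover -v          # run every test module unittest can find
python -m unittest morph.utils_test -v  # or target one module, e.g. the test file added below
```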
stephenjfox committed Feb 18, 2019
1 parent b085293 commit 24bb995
Showing 8 changed files with 49 additions and 6 deletions.
4 changes: 2 additions & 2 deletions demo.py
@@ -6,7 +6,7 @@
import morph.nn as net
from morph.layers.sparse import sparsify

-from morph._models import EasyMnist
+from morph.testing.models import EasyMnist


def random_dataset():
@@ -19,7 +19,7 @@ def main():

    print(modified) # take a peek at the new layers. You take it from here

-    my_dataloader = DataLoader(random_dataset)
+    my_dataloader = DataLoader(random_dataset())

    # get back the class that will do work
    morphed = net.Morph(my_model, epochs=5, dataloader=my_dataloader)
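
The second hunk fixes a classic slip: `DataLoader` was being handed the function object `random_dataset` instead of the dataset it returns. A minimal illustration of the difference (plain Python, no torch needed; the function name matches the demo only for clarity):

```python
def random_dataset():
    return [1, 2, 3]  # stand-in for a real dataset

broken = random_dataset    # the function object itself, not iterable data
fixed = random_dataset()   # the dataset the function returns

print(callable(broken))  # True: we stored the function, not its result
print(fixed)             # [1, 2, 3]
```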
2 changes: 1 addition & 1 deletion morph/__init__.py
@@ -1 +1 @@
-from .nn.morph import once # facility tate "morph.once"
+from .nn import once # facilitate "morph.once"
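
The corrected import also re-exports `once` from the `nn` package, so the usage named in the comment works. A hypothetical sketch (the argument list is illustrative; this diff does not show `once`'s signature):

```python
import torch.nn as nn
import morph

model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
slimmer = morph.once(model)  # hypothetical call; exact parameters are not confirmed by this commit
```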
2 changes: 1 addition & 1 deletion morph/layers/widen.py
@@ -4,7 +4,7 @@
import torch.nn as nn

from ..nn.utils import layer_has_bias, redo_layer
-from .._utils import check, round
+from ..utils import check, round


# NOTE: should factor be {smaller, default at all}?
24 changes: 24 additions & 0 deletions morph/layers/widen_test.py
@@ -0,0 +1,24 @@
import unittest

from .widen import widen, nn
from .._error import ValidationError


class TestWiden_Functional(unittest.TestCase):

    DUD_LINEAR = nn.Linear(1, 1)

    def test_widen_width_factor_too_small_should_fail(self):
        with self.assertRaises(ValidationError):
            widen(self.DUD_LINEAR, 0.8)

    def test_widen_width_factor_identity_should_fail(self):
        with self.assertRaises(ValidationError):
            widen(self.DUD_LINEAR, 1.0)

    def test_widen_width_factor_increases_layer_generously(self):
        pass


if __name__ == "__main__":
    unittest.main()
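
These two tests pin down `widen`'s input validation: a width factor at or below 1.0 must raise `ValidationError`. One plausible shape for the `check` helper that `widen.py` imports (an assumption inferred from these tests, not code shown in this commit):

```python
class ValidationError(Exception):
    """Raised when a caller-supplied argument fails a precondition."""

def check(condition: bool, message: str) -> None:
    # Hypothetical helper: widen(layer, factor) might call, for example,
    #   check(factor > 1.0, "width factor must be greater than 1.0")
    if not condition:
        raise ValidationError(message)
```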
4 changes: 2 additions & 2 deletions morph/nn/utils.py
@@ -1,11 +1,11 @@
import torch.nn as nn

from morph.nn._types import type_name, type_supported
-from morph._utils import check
+from morph.utils import check

from typing import List, Tuple, TypeVar

-ML = TypeVar('MODULES', List[nn.Module])
+ML = List[nn.Module]
# Type constrained to be the results of nn.Module.children() or ...named_children()
CL = TypeVar('MODULE_CHILDREN_LIST', ML, List[Tuple[str, nn.Module]])

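
The `ML` change is more than cleanup: `TypeVar('MODULES', List[nn.Module])` raises `TypeError` at import time, because `typing` forbids a `TypeVar` with a single constraint. For one type, a plain alias is the right tool; with two or more constraints a `TypeVar` is legal, as `CL` shows:

```python
from typing import List, Tuple, TypeVar
import torch.nn as nn

# TypeVar('MODULES', List[nn.Module])  # TypeError: A single constraint is not allowed

ML = List[nn.Module]  # one type -> a plain alias

# Two or more constraints make a TypeVar legal:
CL = TypeVar('MODULE_CHILDREN_LIST', ML, List[Tuple[str, nn.Module]])
```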
File renamed without changes.
File renamed without changes.
19 changes: 19 additions & 0 deletions morph/utils_test.py
@@ -0,0 +1,19 @@
import unittest

from .utils import round

class TestGlobalUtilities(unittest.TestCase):

    def test_round_down(self):
        test = 1.2
        expected = 1
        self.assertEqual(expected, round(test), '1.2 should round DOWN, to 1')

    def test_round_up(self):
        test = 1.7
        expected = 2
        self.assertEqual(expected, round(test), '1.7 should round UP, to 2')


if __name__ == "__main__":
    unittest.main()
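
Both cases are consistent with round-half-up behavior, which Python's built-in `round` does not guarantee (it rounds ties toward the nearest even number). A minimal sketch of a `round` that satisfies these tests (an assumption; the real `morph.utils.round` may treat ties such as 1.5 differently):

```python
import math

def round(x: float) -> int:
    # Round half up: 1.2 -> 1, 1.7 -> 2 (and 1.5 would go to 2)
    return math.floor(x + 0.5)
```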
