test(tailor): add test for name is none (#87)
* test(tailor): ignore class itself, add test for name is none

* test(tailor): ignore class itself, add test for name is none

* test(tailor): trigger a build
bwanglzu committed Oct 6, 2021
1 parent 1956a3d commit 47b7a55
Showing 4 changed files with 45 additions and 2 deletions.
1 change: 1 addition & 0 deletions finetuner/tailor/pytorch/__init__.py
@@ -105,6 +105,7 @@ def hook(module, input, output):
                 not output_shape
                 or len(output_shape) != 2
                 or not is_list_int(output_shape)
+                or summary[layer]['cls_name'] in self._model.__class__.__name__
             ):
                 continue

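For context, the added condition skips any candidate layer whose recorded class name matches the wrapping model's own class, so the model itself is never reported as an embedding-layer candidate. A minimal, self-contained sketch of that check (the `summary` structure and the names below are illustrative assumptions based on the diff, not the actual finetuner internals):

# Sketch only: assumes `summary` maps layer names to metadata dicts with a
# 'cls_name' key, mirroring what the hunk above suggests.
class DenseModel:
    pass

model = DenseModel()
summary = {
    'linear_1': {'cls_name': 'Linear'},
    'densemodel_0': {'cls_name': 'DenseModel'},  # the model class itself
}

candidates = [
    layer
    for layer in summary
    # same membership test as the added line: drop entries whose class name
    # appears in the model's own class name
    if summary[layer]['cls_name'] not in model.__class__.__name__
]
print(candidates)  # -> ['linear_1']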
6 changes: 5 additions & 1 deletion tests/unit/tailor/test_keras.py
@@ -113,6 +113,11 @@ def test_trim_fail_given_unexpected_layer_name(model, layer_name):
         ('vgg16_cnn_model', 'fc1', (None, 4096)),
         ('stacked_lstm', 'lstm_2', (None, 256)),
         ('bidirectional_lstm', 'bidirectional', (None, 128)),
+        ('dense_model', None, (None, 10)),
+        ('simple_cnn_model', None, (None, 10)),
+        ('vgg16_cnn_model', None, (None, 1000)),
+        ('stacked_lstm', None, (None, 5)),
+        ('bidirectional_lstm', None, (None, 32)),
     ],
     indirect=['model'],
 )
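A note on the mechanism these tables rely on: with `indirect=['model']`, pytest routes each row's first value through a fixture named `model` (exposed to it as `request.param`) instead of passing the string to the test verbatim, while `layer_name` is passed straight through, including the new `None` cases. A minimal, runnable sketch of indirect parametrization (fixture and test names here are illustrative, not from this repo):

import pytest

@pytest.fixture
def model(request):
    # request.param carries the string from the parametrize table; a real
    # suite would build and return the corresponding model object here.
    return {'name': request.param}

@pytest.mark.parametrize(
    'model, layer_name',
    [('dense_model', None)],
    indirect=['model'],  # only 'model' is routed through the fixture
)
def test_indirect_sketch(model, layer_name):
    assert model['name'] == 'dense_model'
    assert layer_name is None  # passed through verbatim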
@@ -159,7 +164,6 @@ def test_keras_model_parser():
     assert r[1]['name'] == 'l2'
     assert r[2]['name'] == 'l3'

-    # flat layer can be a nonparametric candidate
     assert r[0]['output_features'] == 784
     assert r[0]['params'] == 0
19 changes: 19 additions & 0 deletions tests/unit/tailor/test_paddle.py
@@ -200,6 +200,25 @@ def test_freeze(model, layer_name, input_size, input_dtype):
         ),
         ('stacked_lstm', 'linear_33', (128,), (1, 128), 'int64', [1, 256]),
         ('bidirectional_lstm', 'linear_35', (128,), (1, 128), 'int64', [1, 128]),
+        ('dense_model', None, (128,), (1, 128), 'float32', [1, 10]),
+        (
+            'simple_cnn_model',
+            None,
+            (1, 28, 28),
+            (1, 1, 28, 28),
+            'float32',
+            [1, 10],
+        ),
+        (
+            'vgg16_cnn_model',
+            None,
+            (3, 224, 224),
+            (1, 3, 224, 224),
+            'float32',
+            [1, 4096],
+        ),
+        ('stacked_lstm', None, (128,), (1, 128), 'int64', [1, 5]),
+        ('bidirectional_lstm', None, (128,), (1, 128), 'int64', [1, 128]),
     ],
     indirect=['model'],
 )
21 changes: 20 additions & 1 deletion tests/unit/tailor/test_torch.py
@@ -164,6 +164,25 @@ def test_trim_fail_given_unexpected_layer_idx(
         ),
         ('stacked_lstm', 'linear_3', (128,), (1, 128), 'int64', [1, 256]),
         ('bidirectional_lstm', 'linear_4', (128,), (1, 128), 'int64', [1, 128]),
+        ('dense_model', None, (128,), (1, 128), 'float32', [1, 10]),
+        (
+            'simple_cnn_model',
+            None,
+            (1, 28, 28),
+            (1, 1, 28, 28),
+            'float32',
+            [1, 10],
+        ),
+        (
+            'vgg16_cnn_model',
+            None,
+            (3, 224, 224),
+            (1, 3, 224, 224),
+            'float32',
+            [1, 4096],
+        ),
+        ('stacked_lstm', None, (128,), (1, 128), 'int64', [1, 5]),
+        ('bidirectional_lstm', None, (128,), (1, 128), 'int64', [1, 128]),
     ],
     indirect=['model'],
 )
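The `None` rows added across these suites exercise the fallback path: when no layer name is supplied, the expected shape equals the model's final output (e.g. `[1, 10]` for the 10-class dense model), which suggests the tailor falls back to the last layer. A toy sketch of that fallback pattern (a hypothetical helper, not the finetuner API):

def resolve_layer(layers, layer_name=None):
    """Pick the named layer, or fall back to the last one when the name is None."""
    if layer_name is None:
        return layers[-1]
    return next(layer for layer in layers if layer['name'] == layer_name)

layers = [{'name': 'conv_1'}, {'name': 'linear_2'}, {'name': 'linear_3'}]
assert resolve_layer(layers)['name'] == 'linear_3'            # name is None
assert resolve_layer(layers, 'linear_2')['name'] == 'linear_2'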
@@ -182,7 +201,7 @@ def test_trim(
     if input_dtype == 'int64':
         input_ = input_.type(torch.IntTensor)
     out = pytorch_tailor.model(input_)
-    assert list(out.size()) == expected_output_shape  # 4th layer Linear
+    assert list(out.size()) == expected_output_shape


@pytest.mark.parametrize(
