Merge pull request #268 from vladimir-v-diaz/code_coverage
Code Coverage & Configuration
vladimir-v-diaz committed Apr 29, 2015
2 parents ae26a52 + 1bd9e1c commit 5cb554c
Showing 5 changed files with 77 additions and 12 deletions.
8 changes: 5 additions & 3 deletions tests/aggregate_tests.py
@@ -62,6 +62,8 @@
# modules.
random.shuffle(tests_without_extension)


suite = unittest.TestLoader().loadTestsFromNames(tests_without_extension)
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
  suite = unittest.TestLoader().loadTestsFromNames(tests_without_extension)
  all_tests_passed = unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
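The sys.exit(1) added above is what lets tox (and the coverage run configured in tox.ini below) detect a failing suite, since tox only marks an environment as failed when the command it invokes returns a non-zero exit status. A minimal, hypothetical sketch of a caller observing that status (not part of the commit):

import subprocess

# Exit status of the aggregated test run: 0 if every test passed,
# 1 if any test failed (per the change above).
status = subprocess.call(['python', 'aggregate_tests.py'])
print('tests passed' if status == 0 else 'tests failed')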
65 changes: 59 additions & 6 deletions tests/test_updater.py
@@ -686,6 +686,10 @@ def test_3__update_metadata_if_changed(self):
self.assertTrue(self.repository_updater.metadata['current']['targets'])
self.assertEqual(self.repository_updater.metadata['current']['targets']['version'], 2)

# Test for an invalid 'referenced_metadata' argument.
self.assertRaises(tuf.RepositoryError,
self.repository_updater._update_metadata_if_changed,
'snapshot', 'bad_role')



@@ -789,8 +793,17 @@ def test_4__refresh_targets_metadata(self):
# Verify that client's metadata files were refreshed successfully.
self.assertEqual(len(self.repository_updater.metadata['current']), 5)



# Test for compressed metadata roles.
self.repository_updater.metadata['current']['snapshot']['meta']['targets.json.gz'] = \
self.repository_updater.metadata['current']['snapshot']['meta']['targets.json']
self.repository_updater._refresh_targets_metadata(include_delegations=True)

# Test for repository error if the 'targets' role is not specified
# in 'snapshot'.
del self.repository_updater.metadata['current']['snapshot']['meta']['targets.json']
self.assertRaises(tuf.RepositoryError,
self.repository_updater._refresh_targets_metadata,
'targets', True)


def test_5_all_targets(self):
@@ -1024,10 +1037,14 @@ def test_6_download_target(self):
download_targetfileinfo['custom'] = target_fileinfo['fileinfo']['custom']
self.assertEqual(target_fileinfo['fileinfo'], download_targetfileinfo)

# Test when consistent snapshots is set.
self.repository_updater.consistent_snapshots = True
# Test when consistent snapshots is set. TODO: create a valid repository
# with consistent snapshots set. The updater expects the existence
# of <hash>.filename files if root.json sets 'consistent_snapshot = True'.
"""
self.repository_updater.consistent_snapshot = True
self.repository_updater.download_target(target_fileinfo,
destination_directory)
"""

# Test: Invalid arguments.
self.assertRaises(tuf.FormatError, self.repository_updater.download_target,
@@ -1037,7 +1054,14 @@
target_fileinfo = self.repository_updater.target(random_target_filepath)
self.assertRaises(tuf.FormatError, self.repository_updater.download_target,
target_fileinfo, 8)


# Non-existent destination.
# TODO: test for non-existent directories.
"""
self.assertRaises(tuf.Error, self.repository_updater.download_target,
target_fileinfo, 'non-existent/bad_path')
"""

# Test:
# Attempt a file download of a valid target, however, a download exception
# occurs because the target is not within the mirror's confined target
@@ -1073,12 +1097,21 @@ def test_7_updated_targets(self):
# Get the list of target files. It will be used as an argument to the
# 'updated_targets' function.
all_targets = self.repository_updater.all_targets()

# Test for duplicates and targets in the root directory of the repository.
additional_target = all_targets[0].copy()
all_targets.append(additional_target)
additional_target_in_root_directory = additional_target.copy()
additional_target_in_root_directory['filepath'] = 'file1.txt'
all_targets.append(additional_target_in_root_directory)

# At this point, the client needs to update and download all targets.
# Test: normal cases.
updated_targets = \
self.repository_updater.updated_targets(all_targets, destination_directory)

all_targets = self.repository_updater.all_targets()

# Assume the pre-generated repository specifies two target files in
# 'targets.json' and one delegated target file in 'targets/role1.json'.
self.assertEqual(len(updated_targets), 3)
@@ -1096,7 +1129,7 @@
# Test: download all the targets.
for download_target in all_targets:
self.repository_updater.download_target(download_target,
destination_directory)
destination_directory)
updated_targets = \
self.repository_updater.updated_targets(all_targets, destination_directory)

@@ -1258,8 +1291,28 @@ def test_10__targets_of_role(self):
0)


def test_10__visit_child_role(self):
# Call _visit_child_role() and test its handling of the 'paths' and
# 'path_hash_prefixes' dict keys, and the case where both are missing.

targets_role = self.repository_updater.metadata['current']['targets']

child_role = targets_role['delegations']['roles'][0]
self.assertEqual(self.repository_updater._visit_child_role(child_role,
'/file3.txt'), child_role['name'])

# Test path hash prefixes.
child_role['path_hash_prefixes'] = ['8baf', '0000']
self.assertEqual(self.repository_updater._visit_child_role(child_role,
'/file3.txt'), child_role['name'])

# Test when both 'paths' and 'path_hash_prefixes' are missing.
del child_role['paths']
del child_role['path_hash_prefixes']
self.assertRaises(tuf.FormatError, self.repository_updater._visit_child_role,
child_role, child_role['name'])




def _load_role_keys(keystore_directory):
2 changes: 1 addition & 1 deletion tests/test_util.py
@@ -422,7 +422,7 @@ def test_C2_find_delegated_role(self):
'targets/tuf')

# Test missing 'name' attribute (optional, but required by
# 'find_delegated_role()'.
# 'find_delegated_role()').
# Delete the duplicate role, and the remaining role's 'name' attribute.
del role_list[2]
del role_list[0]['name']
4 changes: 3 additions & 1 deletion tox.ini
@@ -4,6 +4,7 @@
# and then run "tox" from this directory.

[tox]
#envlist = py27
envlist = py26, py27, py32, py33, py34


@@ -12,7 +13,8 @@ changedir = tests

commands =
coverage run --source tuf aggregate_tests.py
coverage report -m
coverage report -m --fail-under 96
coverage html

deps =
coverage
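Taken together, the tox commands run the aggregated tests under coverage restricted to the tuf package, print a per-file report with missing line numbers, fail the tox environment if total coverage drops below 96%, and write an HTML report (to htmlcov/ by default). A rough Python-level equivalent, offered only as a sketch and assuming coverage 4.x's Coverage API (the commit itself only changes the command lines above):

import unittest
import coverage

cov = coverage.Coverage(source=['tuf'])          # coverage run --source tuf
cov.start()
suite = unittest.TestLoader().discover('tests')  # stand-in for aggregate_tests.py
unittest.TextTestRunner(verbosity=2).run(suite)
cov.stop()
cov.save()

total = cov.report(show_missing=True)            # coverage report -m
cov.html_report()                                # coverage html
if total < 96:                                   # --fail-under 96
  raise SystemExit(1)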
10 changes: 9 additions & 1 deletion tuf/client/updater.py
@@ -1914,6 +1914,9 @@ def _refresh_targets_metadata(self, rolename='targets', include_delegations=False):
# extensions.
if metadata_path.endswith('.json'):
  roles_to_update.append(metadata_path[:-len('.json')])

else:
  continue

# Remove the 'targets' role because it gets updated when the targets.json
# file is updated in _update_metadata_if_changed('targets').
@@ -2440,6 +2443,9 @@ def _visit_child_role(self, child_role, target_filepath):
for child_role_path_hash_prefix in child_role_path_hash_prefixes:
  if target_filepath_hash.startswith(child_role_path_hash_prefix):
    child_role_is_relevant = True

  else:
    continue

elif child_role_paths is not None:
  for child_role_path in child_role_paths:
@@ -2726,6 +2732,8 @@ def download_target(self, target, destination_directory):
raise

else:
  logger.warning(repr(target_dirpath) + ' does not exist.')
  message = repr(target_dirpath) + ' does not exist.'
  logger.warning(message)
  raise tuf.Error(message)

target_file_object.move(destination)
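For context on the path_hash_prefixes branch touched above: _visit_child_role decides whether a delegated role is relevant to a target by hashing the target path and checking whether any entry in the role's 'path_hash_prefixes' is a prefix of the hex digest. A standalone sketch of that comparison (hedged: it uses hashlib directly, whereas the updater goes through tuf's own hashing utilities, and the helper name is invented for illustration):

import hashlib

def matching_role_by_hash_prefix(child_role, target_filepath):
  # Hash the target path and return the role name if any listed prefix
  # matches the start of the hex digest; otherwise return None.
  digest = hashlib.sha256(target_filepath.encode('utf-8')).hexdigest()
  for prefix in child_role.get('path_hash_prefixes', []):
    if digest.startswith(prefix):
      return child_role['name']
  return None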
