
Commit

Replace the use of dict.items(mydict) with mydict.items(), dict.keys(mydict) with mydict.keys() and dict.values(mydict) with mydict.values()

Signed-off-by: Velichka Atanasova <avelichka@vmware.com>
avelichka committed Apr 8, 2021
1 parent 59b5fa9 commit 51897fa
Showing 20 changed files with 42 additions and 42 deletions.
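The change is purely mechanical: every unbound call through the dict class (dict.items(mydict)) becomes a bound call on the instance (mydict.items()). For a plain dict the two spellings are equivalent, but the bound form is the idiomatic Python 3 style and, unlike the unbound form, it dispatches through any override defined on a dict subclass. A minimal sketch of the difference, assuming Python 3 (the LoudDict class is hypothetical, for illustration only):

mydict = {'a': 1, 'b': 2}

# Equivalent for a plain dict: both yield the same (key, value) pairs.
assert list(dict.items(mydict)) == list(mydict.items())

class LoudDict(dict):
    # Hypothetical subclass that instruments items().
    def items(self):
        print('items() called')
        return super().items()

loud = LoudDict(mydict)
loud.items()      # bound call: prints 'items() called'
dict.items(loud)  # unbound call on the base class: silently bypasses the override

Since the call sites in this commit all appear to operate on plain dicts, the rename should be stylistic, with no change in behavior.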
2 changes: 1 addition & 1 deletion tests/test_endless_data_attack.py
@@ -269,7 +269,7 @@ def test_with_tuf(self):
self.repository_updater.refresh()

except tuf.exceptions.NoWorkingMirrorError as exception:
- for mirror_url, mirror_error in dict.items(exception.mirror_errors):
+ for mirror_url, mirror_error in exception.mirror_errors.items():
self.assertTrue(isinstance(mirror_error, securesystemslib.exceptions.Error))

else:
2 changes: 1 addition & 1 deletion tests/test_extraneous_dependencies_attack.py
@@ -205,7 +205,7 @@ def test_with_tuf(self):
# Verify that the specific 'tuf.exceptions.ForbiddenTargetError' exception is raised
# by each mirror.
except tuf.exceptions.NoWorkingMirrorError as exception:
- for mirror_url, mirror_error in dict.items(exception.mirror_errors):
+ for mirror_url, mirror_error in exception.mirror_errors.items():
url_prefix = self.repository_mirrors['mirror1']['url_prefix']
url_file = os.path.join(url_prefix, 'metadata', 'role1.json')

4 changes: 2 additions & 2 deletions tests/test_formats.py
@@ -286,15 +286,15 @@ def test_schemas(self):

# Iterate 'valid_schemas', ensuring each 'valid_schema' correctly matches
# its respective 'schema_type'.
- for schema_name, (schema_type, valid_schema) in dict.items(valid_schemas):
+ for schema_name, (schema_type, valid_schema) in valid_schemas.items():
if not schema_type.matches(valid_schema):
print('bad schema: ' + repr(valid_schema))
self.assertEqual(True, schema_type.matches(valid_schema))

# Test conditions for invalid schemas.
# Set the 'valid_schema' of 'valid_schemas' to an invalid
# value and test that it does not match 'schema_type'.
- for schema_name, (schema_type, valid_schema) in dict.items(valid_schemas):
+ for schema_name, (schema_type, valid_schema) in valid_schemas.items():
invalid_schema = 0xBAD
if isinstance(schema_type, securesystemslib.schema.Integer):
invalid_schema = 'BAD'
4 changes: 2 additions & 2 deletions tests/test_indefinite_freeze_attack.py
@@ -367,7 +367,7 @@ def test_with_tuf(self):

except tuf.exceptions.NoWorkingMirrorError as e:
# Make sure the contained error is ExpiredMetadataError
- for mirror_url, mirror_error in dict.items(e.mirror_errors):
+ for mirror_url, mirror_error in e.mirror_errors.items():
self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError))

else:
@@ -427,7 +427,7 @@ def test_with_tuf(self):

except tuf.exceptions.NoWorkingMirrorError as e:
# Make sure the contained error is ExpiredMetadataError
- for mirror_url, mirror_error in dict.items(e.mirror_errors):
+ for mirror_url, mirror_error in e.mirror_errors.items():
self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError))
self.assertTrue(mirror_url.endswith('snapshot.json'))

2 changes: 1 addition & 1 deletion tests/test_mix_and_match_attack.py
@@ -226,7 +226,7 @@ def test_with_tuf(self):
# 'tuf.exceptions.BadVersionNumberError' exception is raised by
# each mirror.
except tuf.exceptions.NoWorkingMirrorError as exception:
- for mirror_url, mirror_error in dict.items(exception.mirror_errors):
+ for mirror_url, mirror_error in exception.mirror_errors.items():
url_prefix = self.repository_mirrors['mirror1']['url_prefix']
url_file = os.path.join(url_prefix, 'metadata', 'role1.json')

2 changes: 1 addition & 1 deletion tests/test_multiple_repositories_integration.py
@@ -270,7 +270,7 @@ def test_repository_tool(self):
multi_repo_updater = updater.MultiRepoUpdater(self.map_file)
valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt')

- for my_updater, my_targetinfo in dict.items(valid_targetinfo):
+ for my_updater, my_targetinfo in valid_targetinfo.items():
my_updater.download_target(my_targetinfo, self.temporary_directory)
self.assertTrue(os.path.exists(os.path.join(self.temporary_directory, 'file3.txt')))

2 changes: 1 addition & 1 deletion tests/test_replay_attack.py
@@ -312,7 +312,7 @@ def test_with_tuf(self):
# Verify that the specific 'tuf.exceptions.ReplayedMetadataError' is raised by each
# mirror.
except tuf.exceptions.NoWorkingMirrorError as exception:
- for mirror_url, mirror_error in dict.items(exception.mirror_errors):
+ for mirror_url, mirror_error in exception.mirror_errors.items():
url_prefix = self.repository_mirrors['mirror1']['url_prefix']
url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')

2 changes: 1 addition & 1 deletion tests/test_repository_lib.py
@@ -270,7 +270,7 @@ def test_get_target_hash(self):
'/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0',
'/packages/file2.txt': 'c9c4a5cdd84858dd6a23d98d7e6e6b2aec45034946c16b2200bc317c75415e92'
}
- for filepath, target_hash in dict.items(expected_target_hashes):
+ for filepath, target_hash in expected_target_hashes.items():
self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash))
self.assertEqual(repo_lib.get_target_hash(filepath), target_hash)
2 changes: 1 addition & 1 deletion tests/test_slow_retrieval_attack.py
@@ -216,7 +216,7 @@ def test_delay_before_send(self):
# Verify that the specific 'tuf.exceptions.SlowRetrievalError' exception is raised by
# each mirror.
except tuf.exceptions.NoWorkingMirrorError as exception:
- for mirror_url, mirror_error in dict.items(exception.mirror_errors):
+ for mirror_url, mirror_error in exception.mirror_errors.items():
url_prefix = self.repository_mirrors['mirror1']['url_prefix']
url_file = os.path.join(url_prefix, 'targets', 'file1.txt')

14 changes: 7 additions & 7 deletions tests/test_updater.py
@@ -774,7 +774,7 @@ def test_3__update_metadata(self):
DEFAULT_TARGETS_FILELENGTH, 88)

except tuf.exceptions.NoWorkingMirrorError as e:
- for mirror_error in dict.values(e.mirror_errors):
+ for mirror_error in e.mirror_errors.values():
assert isinstance(mirror_error, tuf.exceptions.BadVersionNumberError)

else:
@@ -790,7 +790,7 @@ def test_3__update_metadata(self):
88)

except tuf.exceptions.NoWorkingMirrorError as e:
- for mirror_error in dict.values(e.mirror_errors):
+ for mirror_error in e.mirror_errors.values():
assert isinstance(mirror_error, tuf.exceptions.BadVersionNumberError)

else:
@@ -838,7 +838,7 @@ def test_3__get_metadata_file(self):
# Note that this test provides a piece of metadata which would fail to
# be accepted -- with a different error -- if the specification version
# number were not a problem.
- for mirror_error in dict.values(e.mirror_errors):
+ for mirror_error in e.mirror_errors.values():
assert isinstance(
mirror_error, tuf.exceptions.UnsupportedSpecificationError)

@@ -920,7 +920,7 @@ def test_3__targets_of_role(self):
# target files.
self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(targetinfos_list))
for targetinfo in targetinfos_list:
- self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in dict.items(targets_in_metadata))
+ self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in targets_in_metadata.items())



@@ -1083,7 +1083,7 @@ def test_5_targets_of_role(self):
# target files.
self.assertTrue(tuf.formats.TARGETINFOS_SCHEMA.matches(targetinfos))
for targetinfo in targetinfos:
- self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in dict.items(expected_targets))
+ self.assertTrue((targetinfo['filepath'], targetinfo['fileinfo']) in expected_targets.items())

# Test: Invalid arguments.
# targets_of_role() expected a string rolename.
@@ -1367,7 +1367,7 @@ def test_6_download_target(self):
# field contains at least one confined target and excludes needed target
# file.
mirrors = self.repository_updater.mirrors
- for mirror_name, mirror_info in dict.items(mirrors):
+ for mirror_name, mirror_info in mirrors.items():
mirrors[mirror_name]['confined_target_dirs'] = [self.random_path()]

try:
@@ -1629,7 +1629,7 @@ def test_9__get_target_hash(self):
'/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
'/Jalape\xc3\xb1o': '78bfd5c314680545eb48ecad508aceb861f8d6e680f4fe1b791da45c298cda88'
}
- for filepath, target_hash in dict.items(expected_target_hashes):
+ for filepath, target_hash in expected_target_hashes.items():
self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
self.assertTrue(securesystemslib.formats.HASH_SCHEMA.matches(target_hash))
self.assertEqual(self.repository_updater._get_target_hash(filepath), target_hash)
12 changes: 6 additions & 6 deletions tests/test_updater_root_rotation_integration.py
@@ -247,7 +247,7 @@ def test_verify_root_with_current_keyids_and_threshold(self):
with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
self.repository_updater.refresh()

- for mirror_url, mirror_error in dict.items(cm.exception.mirror_errors):
+ for mirror_url, mirror_error in cm.exception.mirror_errors.items():
self.assertTrue(mirror_url.endswith('/2.root.json'))
self.assertTrue(isinstance(mirror_error,
securesystemslib.exceptions.BadSignatureError))
@@ -306,7 +306,7 @@ def test_verify_root_with_duplicate_current_keyids(self):
with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
self.repository_updater.refresh()

- for mirror_url, mirror_error in dict.items(cm.exception.mirror_errors):
+ for mirror_url, mirror_error in cm.exception.mirror_errors.items():
self.assertTrue(mirror_url.endswith('/2.root.json'))
self.assertTrue(isinstance(mirror_error,
securesystemslib.exceptions.BadSignatureError))
@@ -449,7 +449,7 @@ def test_root_rotation_missing_keys(self):
with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
self.repository_updater.refresh()

- for mirror_url, mirror_error in dict.items(cm.exception.mirror_errors):
+ for mirror_url, mirror_error in cm.exception.mirror_errors.items():
self.assertTrue(mirror_url.endswith('/2.root.json'))
self.assertTrue(isinstance(mirror_error,
securesystemslib.exceptions.BadSignatureError))
@@ -524,7 +524,7 @@ def test_root_rotation_unmet_last_version_threshold(self):
with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
self.repository_updater.refresh()

- for mirror_url, mirror_error in dict.items(cm.exception.mirror_errors):
+ for mirror_url, mirror_error in cm.exception.mirror_errors.items():
self.assertTrue(mirror_url.endswith('/3.root.json'))
self.assertTrue(isinstance(mirror_error,
securesystemslib.exceptions.BadSignatureError))
@@ -570,7 +570,7 @@ def test_root_rotation_unmet_new_threshold(self):
with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
self.repository_updater.refresh()

- for mirror_url, mirror_error in dict.items(cm.exception.mirror_errors):
+ for mirror_url, mirror_error in cm.exception.mirror_errors.items():
self.assertTrue(mirror_url.endswith('/3.root.json'))
self.assertTrue(isinstance(mirror_error,
securesystemslib.exceptions.BadSignatureError))
@@ -605,7 +605,7 @@ def test_root_rotation_discard_untrusted_version(self):
with self.assertRaises(tuf.exceptions.NoWorkingMirrorError) as cm:
self.repository_updater.refresh()

- for mirror_url, mirror_error in dict.items(cm.exception.mirror_errors):
+ for mirror_url, mirror_error in cm.exception.mirror_errors.items():
self.assertTrue(mirror_url.endswith('/2.root.json'))
self.assertTrue(isinstance(mirror_error,
securesystemslib.exceptions.BadSignatureError))
14 changes: 7 additions & 7 deletions tuf/client/updater.py
@@ -387,7 +387,7 @@ def _matching_targetinfo(
# a threshold of 2:
# [A, B, C, B, A, C]
# In this case, targetinfo B is returned.
- for valid_updater, compared_targetinfo in dict.items(valid_targetinfo):
+ for valid_updater, compared_targetinfo in valid_targetinfo.items():

if not self._targetinfo_match(
targetinfo, compared_targetinfo, match_custom_field):
@@ -959,7 +959,7 @@ def _import_delegations(self, parent_role):
logger.debug('Adding roles delegated from ' + repr(parent_role) + '.')

# Iterate the keys of the delegated roles of 'parent_role' and load them.
- for keyid, keyinfo in dict.items(keys_info):
+ for keyid, keyinfo in keys_info.items():
if keyinfo['keytype'] in ['rsa', 'ed25519', 'ecdsa-sha2-nistp256']:

# We specify the keyid to ensure that it's the correct keyid
@@ -1204,7 +1204,7 @@ def _check_hashes(self, file_object, trusted_hashes):
"""

# Verify each hash, raise an exception if any hash fails to verify
- for algorithm, trusted_hash in dict.items(trusted_hashes):
+ for algorithm, trusted_hash in trusted_hashes.items():
digest_object = sslib_hash.digest_fileobject(file_object,
algorithm)
computed_hash = digest_object.hexdigest()
@@ -2097,7 +2097,7 @@ def _fileinfo_has_changed(self, metadata_filename, new_fileinfo):
# without having that result in considering all files as needing to be
# updated, or not all hash algorithms listed can be calculated on the
# specific client.
- for algorithm, hash_value in dict.items(new_fileinfo['hashes']):
+ for algorithm, hash_value in new_fileinfo['hashes'].items():
# We're only looking for a single match. This isn't a security
# check, we just want to prevent unnecessary downloads.
if algorithm in current_fileinfo['hashes']:
@@ -2401,7 +2401,7 @@ def _refresh_targets_metadata(self, rolename='targets',

if refresh_all_delegated_roles:

- for role in dict.keys(self.metadata['current']['snapshot']['meta']):
+ for role in self.metadata['current']['snapshot']['meta'].keys():
# snapshot.json keeps track of root.json, targets.json, and delegated
# roles (e.g., django.json, unclaimed.json). Remove the 'targets' role
# because it gets updated when the targets.json file is updated in
@@ -2491,7 +2491,7 @@ def _targets_of_role(self, rolename, targets=None, skip_refresh=False):
return []

# Get the targets specified by the role itself.
- for filepath, fileinfo in dict.items(self.metadata['current'][rolename].get('targets', [])):
+ for filepath, fileinfo in self.metadata['current'][rolename].get('targets', []).items():
new_target = {}
new_target['filepath'] = filepath
new_target['fileinfo'] = fileinfo
@@ -3082,7 +3082,7 @@ def updated_targets(self, targets, destination_directory):

# Try one of the algorithm/digest combos for a mismatch. We break
# as soon as we find a mismatch.
- for algorithm, digest in dict.items(target['fileinfo']['hashes']):
+ for algorithm, digest in target['fileinfo']['hashes'].items():
digest_object = None
try:
digest_object = sslib_hash.digest_filename(target_filepath,
2 changes: 1 addition & 1 deletion tuf/developer_tool.py
@@ -947,7 +947,7 @@ def load_project(project_directory, prefix='', new_targets_location=None,
roleinfo['expires'] = metadata_object['expires']
roleinfo['paths'] = {}

- for filepath, fileinfo in dict.items(metadata_object['targets']):
+ for filepath, fileinfo in metadata_object['targets'].items():
roleinfo['paths'].update({filepath: fileinfo.get('custom', {})})
roleinfo['delegations'] = metadata_object['delegations']
roleinfo['partial_loaded'] = False
2 changes: 1 addition & 1 deletion tuf/exceptions.py
@@ -296,7 +296,7 @@ def __init__(self, mirror_errors):
def __str__(self):
all_errors = 'No working mirror was found:'

- for mirror_url, mirror_error in dict.items(self.mirror_errors):
+ for mirror_url, mirror_error in self.mirror_errors.items():
try:
# http://docs.python.org/2/library/urlparse.html#urlparse.urlparse
mirror_url_tokens = urllib.parse.urlparse(mirror_url)
2 changes: 1 addition & 1 deletion tuf/keydb.py
@@ -117,7 +117,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'):
# Iterate the keys found in 'root_metadata' by converting them to
# 'RSAKEY_SCHEMA' if their type is 'rsa', and then adding them to the
# key database using the provided keyid.
- for keyid, key_metadata in dict.items(root_metadata['keys']):
+ for keyid, key_metadata in root_metadata['keys'].items():
if key_metadata['keytype'] in _SUPPORTED_KEY_TYPES:
# 'key_metadata' is stored in 'KEY_SCHEMA' format. Call
# create_from_metadata_format() to get the key in 'RSAKEY_SCHEMA' format,
2 changes: 1 addition & 1 deletion tuf/mirrors.py
@@ -98,7 +98,7 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
path_key = 'metadata_path' if file_type == 'meta' else 'targets_path'

list_of_mirrors = []
- for junk, mirror_info in dict.items(mirrors_dict):
+ for junk, mirror_info in mirrors_dict.items():
# Does mirror serve this file type at all?
path = mirror_info.get(path_key)
if path is None:
8 changes: 4 additions & 4 deletions tuf/repository_lib.py
@@ -660,7 +660,7 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name):
repository_name=repository_name)

# Add the keys specified in the delegations field of the Targets role.
- for keyid, key_metadata in dict.items(targets_metadata['delegations']['keys']):
+ for keyid, key_metadata in targets_metadata['delegations']['keys'].items():

# Use the keyid found in the delegation
key_object, _ = sslib_keys.format_metadata_to_key(key_metadata,
@@ -1427,7 +1427,7 @@ def generate_targets_metadata(targets_directory, target_files, version,
if use_existing_fileinfo:
# Use the provided fileinfo dicts, conforming to FILEINFO_SCHEMA, rather than
# generating fileinfo
- for target, fileinfo in dict.items(target_files):
+ for target, fileinfo in target_files.items():

# Ensure all fileinfo entries in target_files have a non-empty hashes dict
if not fileinfo.get('hashes', None):
@@ -1496,7 +1496,7 @@ def _generate_targets_fileinfo(target_files, targets_directory,
filedict = {}

# Generate the fileinfo of all the target files listed in 'target_files'.
- for target, fileinfo in dict.items(target_files):
+ for target, fileinfo in target_files.items():

# The root-most folder of the targets directory should not be included in
# target paths listed in targets metadata.
@@ -1517,7 +1517,7 @@

# Copy 'target_path' to 'digest_target' if consistent hashing is enabled.
if write_consistent_targets:
- for target_digest in dict.values(filedict[relative_targetpath]['hashes']):
+ for target_digest in filedict[relative_targetpath]['hashes'].values():
dirname, basename = os.path.split(target_path)
digest_filename = target_digest + '.' + basename
digest_target = os.path.join(dirname, digest_filename)
2 changes: 1 addition & 1 deletion tuf/repository_tool.py
@@ -3159,7 +3159,7 @@ def load_repository(repository_directory, repository_name='default',
# log a warning here as there may be many such duplicate key warnings.
# The repository maintainer should have also been made aware of the
# duplicate key when it was added.
- for key_metadata in dict.values(metadata_object['delegations']['keys']):
+ for key_metadata in metadata_object['delegations']['keys'].values():

# The repo may have used hashing algorithms for the generated keyids
# that doesn't match the client's set of hash algorithms. Make sure
2 changes: 1 addition & 1 deletion tuf/roledb.py
@@ -133,7 +133,7 @@ def create_roledb_from_root_metadata(root_metadata, repository_name='default'):

# Iterate the roles found in 'root_metadata' and add them to '_roledb_dict'.
# Duplicates are avoided.
- for rolename, roleinfo in dict.items(root_metadata['roles']):
+ for rolename, roleinfo in root_metadata['roles'].items():
if rolename == 'root':
roleinfo['version'] = root_metadata['version']
roleinfo['expires'] = root_metadata['expires']
2 changes: 1 addition & 1 deletion tuf/scripts/repo.py
@@ -751,7 +751,7 @@ def remove_target_files_from_metadata(parsed_arguments, repository):
parsed_arguments.role, repository._repository_name)

for glob_pattern in parsed_arguments.remove:
- for path in list(dict.keys(roleinfo['paths'])):
+ for path in list(roleinfo['paths'].keys()):
if fnmatch.fnmatch(path, glob_pattern):
del roleinfo['paths'][path]

