Skip to content

Commit

Permalink
Merge pull request #225 from vladimir-v-diaz/develop
Browse files Browse the repository at this point in the history
Santiago's #224 pull request (with minor edit) and cosmetic edits to miscellaneous modules
  • Loading branch information
vladimir-v-diaz committed Jun 16, 2014
2 parents ac38e55 + 40a6539 commit 68b2337
Show file tree
Hide file tree
Showing 5 changed files with 76 additions and 54 deletions.
60 changes: 40 additions & 20 deletions tests/repository_data/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,29 +25,45 @@

import shutil
import datetime
import optparse

from tuf.repository_tool import *
import tuf.util


parser = optparse.OptionParser()
parser.add_option("-k","--keys", action='store_true', dest="should_generate_keys",
help="Generate a new set of keys", default=False)
parser.add_option("-d","--dry-run", action='store_true', dest="dry_run",
help="Do not write the files, just run", default=False)
(options, args) = parser.parse_args()


repository = create_new_repository('repository')

# Generate and save the top-level role keys, including the delegated roles.
# The unit tests should only have to import the keys they need from these
# pre-generated key files.
root_key_file = 'keystore/root_key'
targets_key_file = 'keystore/targets_key'
snapshot_key_file = 'keystore/snapshot_key'
timestamp_key_file = 'keystore/timestamp_key'
delegation_key_file = 'keystore/delegation_key'

# Generate public and private key files for the top-level roles, and two
# delegated roles (this number of keys should be sufficient for most of the
# unit tests). Unit tests may generate additional keys, if needed.
generate_and_write_rsa_keypair(root_key_file, bits=2048, password='password')
generate_and_write_rsa_keypair(targets_key_file, bits=2048, password='password')
generate_and_write_rsa_keypair(snapshot_key_file, bits=2048, password='password')
generate_and_write_rsa_keypair(timestamp_key_file, bits=2048, password='password')
generate_and_write_rsa_keypair(delegation_key_file, bits=2048, password='password')

if options.should_generate_keys and not options.dry_run:
# Generate and save the top-level role keys, including the delegated roles.
# The unit tests should only have to import the keys they need from these
# pre-generated key files.
# Generate public and private key files for the top-level roles, and two
# delegated roles (this number of keys should be sufficient for most of the
# unit tests). Unit tests may generate additional keys, if needed.
generate_and_write_rsa_keypair(root_key_file, bits=2048, password='password')
generate_and_write_rsa_keypair(targets_key_file, bits=2048,
password='password')
generate_and_write_rsa_keypair(snapshot_key_file, bits=2048,
password='password')
generate_and_write_rsa_keypair(timestamp_key_file, bits=2048,
password='password')
generate_and_write_rsa_keypair(delegation_key_file, bits=2048,
password='password')

# Import the public keys. These keys are needed so that metadata roles are
# assigned verification keys, which clients use to verify the signatures created
Expand Down Expand Up @@ -88,14 +104,15 @@
target3_filepath = 'repository/targets/file3.txt'
tuf.util.ensure_parent_dir(target2_filepath)

with open(target1_filepath, 'wt') as file_object:
file_object.write('This is an example target file.')
if not options.dry_run:
with open(target1_filepath, 'wt') as file_object:
file_object.write('This is an example target file.')

with open(target2_filepath, 'wt') as file_object:
file_object.write('This is an another example target file.')
with open(target2_filepath, 'wt') as file_object:
file_object.write('This is an another example target file.')

with open(target3_filepath, 'wt') as file_object:
file_object.write('This is role1\'s target file.')
with open(target3_filepath, 'wt') as file_object:
file_object.write('This is role1\'s target file.')

# Add target files to the top-level 'targets.json' role. These target files
# should already exist.
Expand All @@ -119,15 +136,18 @@
repository.targets.compressions = ['gz']

# Create the actual metadata files, which are saved to 'metadata.staged'.
repository.write()
if not options.dry_run:
repository.write()

# Move the staged metadata to 'metadata' and create the client folder. The
# client folder includes the required directory structure and metadata
# files for clients to successfully load a 'tuf.client.updater.py' object.
staged_metadata_directory = 'repository/metadata.staged'
metadata_directory = 'repository/metadata'
shutil.copytree(staged_metadata_directory, metadata_directory)
if not options.dry_run:
shutil.copytree(staged_metadata_directory, metadata_directory)

# Create the client files (required directory structure and minimal metadata)
# required by the 'tuf.interposition' and 'tuf.client.updater.py' updaters.
create_tuf_client_directory('repository', 'client')
if not options.dry_run:
create_tuf_client_directory('repository', 'client')
11 changes: 6 additions & 5 deletions tuf/formats.py
Original file line number Diff line number Diff line change
Expand Up @@ -839,7 +839,7 @@ def format_base64(data):
return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ')

except (TypeError, binascii.Error) as e:
raise tuf.FormatError('Invalid base64 encoding: '+str(e))
raise tuf.FormatError('Invalid base64 encoding: ' + str(e))



Expand Down Expand Up @@ -879,7 +879,7 @@ def parse_base64(base64_string):
return binascii.a2b_base64(base64_string.encode('utf-8'))

except (TypeError, binascii.Error) as e:
raise tuf.FormatError('Invalid base64 encoding: '+str(e))
raise tuf.FormatError('Invalid base64 encoding: ' + str(e))



Expand Down Expand Up @@ -1077,7 +1077,8 @@ def get_role_class(expected_rolename):
role_class = ROLE_CLASSES_BY_TYPE[expected_rolename]

except KeyError:
raise tuf.FormatError(repr(expected_rolename)+' not supported.')
raise tuf.FormatError(repr(expected_rolename) + ' not supported.')

else:
return role_class

Expand Down Expand Up @@ -1164,7 +1165,7 @@ def check_signable_object_format(object):
schema = SCHEMAS_BY_TYPE[role_type]

except KeyError:
raise tuf.FormatError('Unrecognized type '+repr(role_type))
raise tuf.FormatError('Unrecognized type ' + repr(role_type))

# 'tuf.FormatError' raised if 'object' does not have a properly
# formatted role schema.
Expand Down Expand Up @@ -1307,7 +1308,7 @@ def encode_canonical(object, output_function=None):
_encode_canonical(object, output_function)

except (TypeError, tuf.FormatError) as e:
message = 'Could not encode '+repr(object)+': '+str(e)
message = 'Could not encode ' + repr(object) + ': ' + str(e)
raise tuf.FormatError(message)

# Return the encoded 'object' as a string.
Expand Down
36 changes: 18 additions & 18 deletions tuf/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -267,17 +267,17 @@ def __init__(self, length):
if isinstance(length, bool) or not isinstance(length, six.integer_types):
# We need to check for bool as a special case, since bool
# is for historical reasons a subtype of int.
raise tuf.FormatError('Got '+repr(length)+' instead of an integer.')
raise tuf.FormatError('Got ' + repr(length) + ' instead of an integer.')

self._string_length = length


def check_match(self, object):
if not isinstance(object, six.string_types):
raise tuf.FormatError('Expected a string but got '+repr(object))
raise tuf.FormatError('Expected a string but got ' + repr(object))

if len(object) != self._string_length:
raise tuf.FormatError('Expected a string of length '+
raise tuf.FormatError('Expected a string of length ' + \
repr(self._string_length))


Expand Down Expand Up @@ -309,17 +309,17 @@ def __init__(self, length):
if isinstance(length, bool) or not isinstance(length, six.integer_types):
# We need to check for bool as a special case, since bool
# is for historical reasons a subtype of int.
raise tuf.FormatError('Got '+repr(length)+' instead of an integer.')
raise tuf.FormatError('Got ' + repr(length) + ' instead of an integer.')

self._bytes_length = length


def check_match(self, object):
if not isinstance(object, six.binary_type):
raise tuf.FormatError('Expected a byte but got '+repr(object))
raise tuf.FormatError('Expected a byte but got ' + repr(object))

if len(object) != self._bytes_length:
raise tuf.FormatError('Expected a byte of length '+
raise tuf.FormatError('Expected a byte of length ' + \
repr(self._bytes_length))


Expand Down Expand Up @@ -358,10 +358,10 @@ class OneOf(Schema):
def __init__(self, alternatives):
# Ensure each item of the list contains the expected object type.
if not isinstance(alternatives, list):
raise tuf.FormatError('Expected a list but got '+repr(alternatives))
raise tuf.FormatError('Expected a list but got ' + repr(alternatives))
for alternative in alternatives:
if not isinstance(alternative, Schema):
raise tuf.FormatError('List contains an invalid item '+repr(alternative))
raise tuf.FormatError('List contains an invalid item ' + repr(alternative))

self._alternatives = alternatives

Expand Down Expand Up @@ -752,15 +752,15 @@ def check_match(self, object):
except KeyError:
# If not an Optional schema, raise an exception.
if not isinstance(schema, Optional):
message = 'Missing key '+repr(key)+' in '+repr(self._object_name)
message = 'Missing key ' + repr(key) + ' in ' + repr(self._object_name)
raise tuf.FormatError(message)
# Check that 'object's schema matches Object()'s schema for this
# particular 'key'.
else:
try:
schema.check_match(item)
except tuf.FormatError as e:
raise tuf.FormatError(str(e)+' in '+self._object_name+'.'+key)
raise tuf.FormatError(str(e) + ' in ' + self._object_name + '.' + key)



Expand Down Expand Up @@ -840,11 +840,11 @@ def __init__(self, sub_schemas, optional_schemas=[], allow_more=False,

# Ensure each item of the list contains the expected object type.
if not isinstance(sub_schemas, (list, tuple)):
message = 'Expected Schema but got '+repr(sub_schemas)
message = 'Expected Schema but got ' + repr(sub_schemas)
raise tuf.FormatError(message)
for schema in sub_schemas:
if not isinstance(schema, Schema):
raise tuf.FormatError('Expected Schema but got '+repr(schema))
raise tuf.FormatError('Expected Schema but got ' + repr(schema))

self._sub_schemas = sub_schemas + optional_schemas
self._min = len(sub_schemas)
Expand All @@ -854,11 +854,11 @@ def __init__(self, sub_schemas, optional_schemas=[], allow_more=False,

def check_match(self, object):
if not isinstance(object, (list, tuple)):
raise tuf.FormatError('Expected '+repr(self._struct_name)+'; got '+repr(object))
raise tuf.FormatError('Expected ' + repr(self._struct_name) + '; got ' + repr(object))
elif len(object) < self._min:
raise tuf.FormatError('Too few fields in '+self._struct_name)
raise tuf.FormatError('Too few fields in ' + self._struct_name)
elif len(object) > len(self._sub_schemas) and not self._allow_more:
raise tuf.FormatError('Too many fields in '+self._struct_name)
raise tuf.FormatError('Too many fields in ' + self._struct_name)

# Iterate through the items of 'object', checking against each schema
# in the list of schemas allowed (i.e., the sub-schemas and also
Expand Down Expand Up @@ -916,7 +916,7 @@ def __init__(self, pattern=None, modifiers=0, re_object=None, re_name=None):

if not isinstance(pattern, six.string_types):
if pattern is not None:
raise tuf.FormatError(repr(pattern)+' is not a string.')
raise tuf.FormatError(repr(pattern) + ' is not a string.')

if re_object is None:
if pattern is None:
Expand All @@ -929,15 +929,15 @@ def __init__(self, pattern=None, modifiers=0, re_object=None, re_name=None):

if re_name is None:
if pattern is not None:
re_name = 'pattern /'+pattern+'/'
re_name = 'pattern /' + pattern + '/'
else:
re_name = 'pattern'
self._re_name = re_name


def check_match(self, object):
if not isinstance(object, six.string_types) or not self._re_object.match(object):
raise tuf.FormatError(repr(object)+' did not match '+repr(self._re_name))
raise tuf.FormatError(repr(object) + ' did not match ' + repr(self._re_name))



Expand Down
2 changes: 1 addition & 1 deletion tuf/sig.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def verify(signable, role):
# Does 'status' have the required threshold of signatures?
# First check for invalid threshold values before returning result.
if threshold is None or threshold <= 0:
raise tuf.Error("Invalid threshold: "+str(threshold))
raise tuf.Error("Invalid threshold: " + str(threshold))

return len(good_sigs) >= threshold

Expand Down
21 changes: 11 additions & 10 deletions tuf/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,6 +232,7 @@ def move(self, destination_path):
destination_file = open(destination_path, 'wb')
shutil.copyfileobj(self.temporary_file, destination_file)
destination_file.close()

# 'self.close()' closes temporary file which destroys itself.
self.close_temp_file()

Expand Down Expand Up @@ -389,7 +390,7 @@ def get_file_details(filepath, hash_algorithms=['sha256']):

# Does the path exist?
if not os.path.exists(filepath):
raise tuf.Error('Path '+repr(filepath)+' doest not exist.')
raise tuf.Error('Path ' + repr(filepath) + ' doest not exist.')
filepath = os.path.abspath(filepath)

# Obtaining length of the file.
Expand Down Expand Up @@ -554,7 +555,7 @@ def find_delegated_role(roles, delegated_role):

# ...there are at least two roles with the same name.
else:
duplicate_role_message = 'Duplicate role ('+str(delegated_role)+').'
duplicate_role_message = 'Duplicate role (' + str(delegated_role) + ').'
raise tuf.RepositoryError(duplicate_role_message)

# This role has a different name.
Expand Down Expand Up @@ -663,7 +664,7 @@ def ensure_all_targets_allowed(rolename, list_of_targets, parent_delegations):
if len(actual_child_targets) > 0:
if not consistent(actual_child_targets,
allowed_child_path_hash_prefixes):
message = repr(rolename)+' specifies a target that does not'+\
message = repr(rolename) + ' specifies a target that does not' + \
' have a path hash prefix listed in its parent role.'
raise tuf.ForbiddenTargetError(message)

Expand Down Expand Up @@ -692,8 +693,8 @@ def ensure_all_targets_allowed(rolename, list_of_targets, parent_delegations):
# 'role' should have been validated when it was downloaded.
# The 'paths' or 'path_hash_prefixes' attributes should not be missing,
# so raise an error in case this clause is reached.
raise tuf.FormatError(repr(role)+' did not contain one of '+\
'the required fields ("paths" or '+\
raise tuf.FormatError(repr(role) + ' did not contain one of ' +\
'the required fields ("paths" or ' +\
'"path_hash_prefixes").')

# Raise an exception if the parent has not delegated to the specified
Expand Down Expand Up @@ -873,11 +874,11 @@ def load_json_string(data):
deserialized_object = json.loads(data)

except TypeError:
message = 'Invalid JSON string: '+repr(data)
message = 'Invalid JSON string: ' + repr(data)
raise tuf.Error(message)

except ValueError:
message = 'Cannot deserialize to a Python object: '+repr(data)
message = 'Cannot deserialize to a Python object: ' + repr(data)
raise tuf.Error(message)

else:
Expand Down Expand Up @@ -916,18 +917,18 @@ def load_json_file(filepath):

# The file is most likely gzipped.
if filepath.endswith('.gz'):
logger.debug('gzip.open('+str(filepath)+')')
logger.debug('gzip.open(' + str(filepath) + ')')
fileobject = six.StringIO(gzip.open(filepath).read().decode('utf-8'))

else:
logger.debug('open('+str(filepath)+')')
logger.debug('open(' + str(filepath) + ')')
fileobject = open(filepath)

try:
deserialized_object = json.load(fileobject)

except (ValueError, TypeError) as e:
message = 'Cannot deserialize to a Python object: '+repr(filepath)
message = 'Cannot deserialize to a Python object: ' + repr(filepath)
raise tuf.Error(message)

else:
Expand Down

0 comments on commit 68b2337

Please sign in to comment.