diff --git a/basic_client.py b/basic_client.py
index 96b3977edc..da66ac7f2e 100755
--- a/basic_client.py
+++ b/basic_client.py
@@ -102,7 +102,7 @@ def update_client(repository_mirror):
   repository_mirrors = {'mirror': {'url_prefix': repository_mirror,
                                    'metadata_path': 'metadata',
                                    'targets_path': 'targets',
-                                   'confined_target_paths': ['']}}
+                                   'confined_target_dirs': ['']}}
 
   # Create the repository object using the repository name 'repository'
   # and the repository mirrors defined above.
diff --git a/example_client.py b/example_client.py
index 3eb386483e..78b1103f48 100755
--- a/example_client.py
+++ b/example_client.py
@@ -41,7 +41,7 @@
 repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                                   'metadata_path': 'metadata',
                                   'targets_path': 'targets',
-                                  'confined_target_paths': ['']}}
+                                  'confined_target_dirs': ['']}}
 
 # Create the Upater object using the updater name 'tuf-example'
 # and the repository mirrors defined above.
diff --git a/tuf/client/updater.py b/tuf/client/updater.py
index 0e113c06d1..d5ce1d1148 100755
--- a/tuf/client/updater.py
+++ b/tuf/client/updater.py
@@ -62,16 +62,16 @@
   # mirror is located at 'http://localhost:8001', and all of the metadata
   # and targets files can be found in the 'metadata' and 'targets' directory,
   # respectively. If the client wishes to only download target files from
-  # specific directories on the mirror, the 'confined_target_paths' field
+  # specific directories on the mirror, the 'confined_target_dirs' field
   # should be set. In the example, the client has chosen '', which is
   # interpreted as no confinement. In other words, the client can download
   # targets from any directory or subdirectories. If the client had chosen
-  # 'targets1', they would have been confined to the '/targets/targets1/'
+  # 'targets1/', they would have been confined to the '/targets/targets1/'
   # directory on the 'http://localhost:8001' mirror.
   repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                                     'metadata_path': 'metadata',
                                     'targets_path': 'targets',
-                                    'confined_target_paths': ['']}}
+                                    'confined_target_dirs': ['']}}
 
   # The updater may now be instantiated. The Updater class of 'updater.py'
   # is called with two arguments. The first argument assigns a name to this
@@ -232,7 +232,7 @@ def __init__(self, updater_name, repository_mirrors):
       repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                                         'metadata_path': 'metadata',
                                         'targets_path': 'targets',
-                                        'confined_target_paths': ['']}}
+                                        'confined_target_dirs': ['']}}
 
     <Exceptions>
       tuf.FormatError:
diff --git a/tuf/formats.py b/tuf/formats.py
index e24c3a2a50..698e24632f 100755
--- a/tuf/formats.py
+++ b/tuf/formats.py
@@ -324,7 +324,7 @@
   url_prefix=URL_SCHEMA,
   metadata_path=RELPATH_SCHEMA,
   targets_path=RELPATH_SCHEMA,
-  confined_target_paths=SCHEMA.ListOf(PATH_SCHEMA),
+  confined_target_dirs=SCHEMA.ListOf(RELPATH_SCHEMA),
   custom=SCHEMA.Optional(SCHEMA.Object()))
 
 # A dictionary of mirrors where the dict keys hold the mirror's name and
diff --git a/tuf/mirrors.py b/tuf/mirrors.py
index 0cba474b1f..8642de6d53 100755
--- a/tuf/mirrors.py
+++ b/tuf/mirrors.py
@@ -50,7 +50,7 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
       {'url_prefix': 'http://localhost:8001'
        'metadata_path': 'metadata/'
        'targets_path': 'targets/'
-       'confined_target_paths': ['targets/release1', ...]
+       'confined_target_dirs': ['targets/release1/', ...]
        'custom': {...}}
 
       The 'custom' field is optional.
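As an aside to the hunks above, here is a minimal sketch of a mirror definition a client might pass in after this change. The URL, directory names, and the 'custom' value are placeholders rather than values taken from this patch, and the schema check assumes MIRROR_SCHEMA keeps the check_match() interface used throughout the codebase:

```python
import tuf.formats

# A hypothetical mirror entry written against the renamed field. Confining
# the client to 'targets/release/' means only files whose parent directory
# is exactly 'targets/release/' may be fetched from this mirror; files in
# deeper subdirectories would not match.
mirror = {'url_prefix': 'http://localhost:8001',
          'metadata_path': 'metadata',
          'targets_path': 'targets',
          'confined_target_dirs': ['targets/release/'],
          'custom': {'type': 'mirror'}}

# MIRROR_SCHEMA (updated in tuf/formats.py above) should accept this layout;
# check_match() raises tuf.FormatError on a mismatch.
tuf.formats.MIRROR_SCHEMA.check_match(mirror)
```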
@@ -75,12 +75,12 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
           '\'target\'.')
     raise tuf.FormatError(msg)
 
-  # Reference to 'tuf.util.path_in_confined_paths()' (improve readability).
-  # This function checks whether a mirror serves the required file.
+  # Reference to 'tuf.util.file_in_confined_directories()' (improve readability).
+  # This function checks whether a mirror should serve a file to the client.
   # A client may be confined to certain paths on a repository mirror
   # when fetching target files. This field may be set by the client when
   # the repository mirror is added to the 'tuf.client.updater.Updater' object.
-  in_confined = tuf.util.path_in_confined_paths
+  in_confined_directory = tuf.util.file_in_confined_directories
 
   list_of_mirrors = []
   for mirror_name, mirror_info in mirrors_dict.items():
@@ -90,7 +90,8 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
     else:
       targets_path = mirror_info['targets_path']
       full_filepath = os.path.join(targets_path, file_path)
-      if not in_confined(full_filepath, mirror_info['confined_target_paths']):
+      if not in_confined_directory(full_filepath,
+                                   mirror_info['confined_target_dirs']):
         continue
       base = mirror_info['url_prefix']+'/'+mirror_info['targets_path']
diff --git a/tuf/tests/test_formats.py b/tuf/tests/test_formats.py
index 1d7505f9ab..bbed5d34c1 100755
--- a/tuf/tests/test_formats.py
+++ b/tuf/tests/test_formats.py
@@ -223,14 +223,14 @@ def test_schemas(self):
                            {'url_prefix': 'http://localhost:8001',
                             'metadata_path': 'metadata/',
                             'targets_path': 'targets/',
-                            'confined_target_paths': ['path1/', 'path2/'],
+                            'confined_target_dirs': ['path1/', 'path2/'],
                             'custom': {'type': 'mirror'}}),
 
       'MIRRORDICT_SCHEMA': (tuf.formats.MIRRORDICT_SCHEMA,
                            {'mirror1': {'url_prefix': 'http://localhost:8001',
                                         'metadata_path': 'metadata/',
                                         'targets_path': 'targets/',
-                                        'confined_target_paths': ['path1/', 'path2/'],
+                                        'confined_target_dirs': ['path1/', 'path2/'],
                                         'custom': {'type': 'mirror'}}}),
 
       'MIRRORLIST_SCHEMA': (tuf.formats.MIRRORLIST_SCHEMA,
@@ -240,7 +240,7 @@ def test_schemas(self):
                             'mirrors': [{'url_prefix': 'http://localhost:8001',
                                          'metadata_path': 'metadata/',
                                          'targets_path': 'targets/',
-                                         'confined_target_paths': ['path1/', 'path2/'],
+                                         'confined_target_dirs': ['path1/', 'path2/'],
                                          'custom': {'type': 'mirror'}}]})}
 
     # Iterate through 'valid_schemas', ensuring each 'valid_schema' correctly
diff --git a/tuf/tests/test_mirrors.py b/tuf/tests/test_mirrors.py
index 6f3d7d720c..e3bce3b855 100755
--- a/tuf/tests/test_mirrors.py
+++ b/tuf/tests/test_mirrors.py
@@ -34,32 +34,31 @@ def setUp(self):
       {'mirror1': {'url_prefix' : 'http://mirror1.com',
                    'metadata_path' : 'metadata',
                    'targets_path' : 'targets',
-                   'confined_target_paths' : ['']},
+                   'confined_target_dirs' : ['']},
        'mirror2': {'url_prefix' : 'http://mirror2.com',
                    'metadata_path' : 'metadata',
                    'targets_path' : 'targets',
-                   'confined_target_paths' : ['targets/target3.py',
-                                              'targets/target4.py']},
+                   'confined_target_dirs' : ['targets/release/',
+                                              'targets/release/']},
        'mirror3': {'url_prefix' : 'http://mirror3.com',
                    'metadata_path' : 'metadata',
                    'targets_path' : 'targets',
-                   'confined_target_paths' : ['targets/target1.py',
-                                              'targets/target2.py']}}
+                   'confined_target_dirs' : ['targets/release/',
+                                              'targets/release/']}}
 
 
 
   def test_get_list_of_mirrors(self):
     # Test: Normal case.
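Before the updated assertions that follow, a self-contained sketch of the behavior being tested. The mirror names, hosts, and file paths below are illustrative; it assumes tuf.mirrors is importable and filters mirrors as the hunks above describe:

```python
import tuf.mirrors

mirrors = {'mirror1': {'url_prefix': 'http://mirror1.com',
                       'metadata_path': 'metadata',
                       'targets_path': 'targets',
                       'confined_target_dirs': ['']},            # unconfined
           'mirror2': {'url_prefix': 'http://mirror2.com',
                       'metadata_path': 'metadata',
                       'targets_path': 'targets',
                       'confined_target_dirs': ['targets/release/']}}

# 'release/file.txt' resolves to 'targets/release/file.txt', whose parent
# directory is 'targets/release/', so both mirrors may serve it.
print(tuf.mirrors.get_list_of_mirrors('target', 'release/file.txt', mirrors))

# 'file.txt' resolves to 'targets/file.txt', which mirror2 is not confined
# to, so only mirror1's URL is expected in the returned list.
print(tuf.mirrors.get_list_of_mirrors('target', 'file.txt', mirrors))
```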
-    mirror_list = \
-     mirrors.get_list_of_mirrors('meta', 'release.txt', self.mirrors)
+    mirror_list = mirrors.get_list_of_mirrors('meta', 'release.txt', self.mirrors)
     self.assertEquals(len(mirror_list), 3)
     for mirror, mirror_info in self.mirrors.items():
       url = mirror_info['url_prefix']+'/metadata/release.txt'
       self.assertTrue(url in mirror_list)
 
-    mirror_list = mirrors.get_list_of_mirrors('target', 'a', self.mirrors)
-    self.assertEquals(len(mirror_list), 3)
-    self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a' in \
+    mirror_list = mirrors.get_list_of_mirrors('target', 'a.txt', self.mirrors)
+    self.assertEquals(len(mirror_list), 1)
+    self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a.txt' in \
                     mirror_list)
 
     mirror_list = mirrors.get_list_of_mirrors('target', 'a/b', self.mirrors)
diff --git a/tuf/tests/test_updater.py b/tuf/tests/test_updater.py
index a31e3e8fdf..4da1f064ad 100755
--- a/tuf/tests/test_updater.py
+++ b/tuf/tests/test_updater.py
@@ -952,12 +952,12 @@ def test_6_download_target(self):
     # Test:
     # Attempt a file download of a valid target, however, a download exception
     # occurs because the target is not within the mirror's confined
-    # target paths.
-    # Adjust mirrors dictionary, so that 'confined_target_paths' field
+    # target directories.
+    # Adjust mirrors dictionary, so that 'confined_target_dirs' field
     # contains at least one confined target and excludes needed target file.
     mirrors = self.Repository.mirrors
     for mirror_name, mirror_info in mirrors.items():
-      mirrors[mirror_name]['confined_target_paths'] = [self.random_path()]
+      mirrors[mirror_name]['confined_target_dirs'] = [self.random_path()]
 
     # Get the target file info.
     file_path = target_rel_paths_src[0]
@@ -971,7 +971,7 @@ def test_6_download_target(self):
                       dest_dir)
 
     for mirror_name, mirror_info in mirrors.items():
-      mirrors[mirror_name]['confined_target_paths'] = ['']
+      mirrors[mirror_name]['confined_target_dirs'] = ['']
 
 
 
diff --git a/tuf/tests/test_util.py b/tuf/tests/test_util.py
old mode 100644
new mode 100755
index b91f68b73f..ef6d9f0f9d
--- a/tuf/tests/test_util.py
+++ b/tuf/tests/test_util.py
@@ -21,16 +21,17 @@
 import gzip
 import shutil
 import logging
-import tuf.hash
 import tempfile
 import unittest
-import unittest_toolbox
 
 import tuf
+import tuf.log
+import tuf.hash
 import tuf.util as util
+import tuf.tests.unittest_toolbox as unittest_toolbox
 
-# Disable/Enable logging. Uncomment to Disable.
-logging.getLogger('tuf')
+# Disable all logging calls of level CRITICAL and below.
+# Comment the line below to enable logging.
 logging.disable(logging.CRITICAL)
 
 
@@ -244,27 +245,35 @@ def test_B2_ensure_parent_dir(self):
 
 
 
-  def test_B3_path_in_confined_paths(self):
-    # Goal: Provide invalid input for 'test_path' and 'confined_paths'.
+  def test_B3_file_in_confined_directories(self):
+    # Goal: Provide invalid input for 'filepath' and 'confined_directories'.
     # Include inputs like: '[1, 2, "a"]' and such...
-    Errors = (tuf.FormatError, TypeError)
-    list_of_confined_paths = ['a', 12, {'a':'a'}, [1]]
-    list_of_paths = [12, ['a'], {'a':'a'}, 'a']
-    for bogus_confined_paths in list_of_confined_paths:
-      for bogus_path in list_of_paths:
-        self.assertRaises(tuf.FormatError, util.path_in_confined_paths,
-                          bogus_path, bogus_confined_paths)
+    # Reference to 'file_in_confined_directories()' to improve readability.
+    in_confined_directory = tuf.util.file_in_confined_directories
+    list_of_confined_directories = ['a', 12, {'a':'a'}, [1]]
+    list_of_filepaths = [12, ['a'], {'a':'a'}, 'a']
+    for bogus_confined_directory in list_of_confined_directories:
+      for filepath in list_of_filepaths:
+        self.assertRaises(tuf.FormatError, in_confined_directory,
+                          filepath, bogus_confined_directory)
 
     # Test: Inputs that evaluate to False.
-    for confined_paths in [['/a/b/c.txt', 'a/b/c'], ['/a/b/c/d/e/']]:
-      for path in ['/a/b/d.txt', 'a', 'a/b/c/d/']:
-        self.assertFalse(util.path_in_confined_paths(path, confined_paths))
-
+    confined_directories = ['a/b/', 'a/b/c/d/']
+    self.assertFalse(in_confined_directory('a/b/c/1.txt', confined_directories))
+
+    confined_directories = ['a/b/c/d/e/']
+    self.assertFalse(in_confined_directory('a', confined_directories))
+    self.assertFalse(in_confined_directory('a/b', confined_directories))
+    self.assertFalse(in_confined_directory('a/b/c', confined_directories))
+    self.assertFalse(in_confined_directory('a/b/c/d', confined_directories))
+    # Below, 'e' is a file in the 'a/b/c/d/' directory.
+    self.assertFalse(in_confined_directory('a/b/c/d/e', confined_directories))
+
     # Test: Inputs that evaluate to True.
-    for confined_paths in [[''], ['/a/b/c.txt', '/a/', '/a/b/c/d/']]:
-      for path in ['a/b/d.txt', 'a/b/x', 'a/b', 'a/b/c/d/g']:
-        self.assertTrue(util.path_in_confined_paths(path, confined_paths))
-
+    self.assertTrue(in_confined_directory('a/b/c.txt', ['']))
+    self.assertTrue(in_confined_directory('a/b/c.txt', ['a/b/']))
+    self.assertTrue(in_confined_directory('a/b/c.txt', ['x', '']))
+    self.assertTrue(in_confined_directory('a/b/c/..', ['a/']))
 
 
 
   def test_B4_import_json(self):
diff --git a/tuf/tests/unittest_toolbox.py b/tuf/tests/unittest_toolbox.py
index efb95a9399..f3e703ab30 100755
--- a/tuf/tests/unittest_toolbox.py
+++ b/tuf/tests/unittest_toolbox.py
@@ -143,15 +143,15 @@ def setUp():
   mirrors = {'mirror1': {'url_prefix' : 'http://mirror1.com',
                          'metadata_path' : 'metadata',
                          'targets_path' : 'targets',
-                         'confined_target_paths' : ['']},
+                         'confined_target_dirs' : ['']},
              'mirror2': {'url_prefix' : 'http://mirror2.com',
                          'metadata_path' : 'metadata',
                          'targets_path' : 'targets',
-                         'confined_target_paths' : ['']},
+                         'confined_target_dirs' : ['']},
              'mirror3': {'url_prefix' : 'http://mirror3.com',
                          'metadata_path' : 'metadata',
                          'targets_path' : 'targets',
-                         'confined_target_paths' : ['']}}
+                         'confined_target_dirs' : ['']}}
 
 
 
diff --git a/tuf/util.py b/tuf/util.py
index 5b0bc18436..a3f9dc9d63 100755
--- a/tuf/util.py
+++ b/tuf/util.py
@@ -4,19 +4,18 @@
 <Author>
   Konstantin Andrianov
-  Derived from original util.py written by Geremy Condra.
 
 <Started>
-  March 24, 2012
+  March 24, 2012. Derived from original util.py written by Geremy Condra.
 
 <Copyright>
   See LICENSE for licensing information.
 
 <Purpose>
   Provides utility services. This module supplies utility functions such as:
-  get_file_details that computes length and hash of a file, import_json that
-  tries to import a working json module, load_json functions, TempFile class -
-  generates a file-like object temporary starage, etc.
+  get_file_details() that computes the length and hash of a file, import_json
+  that tries to import a working json module, load_json_* functions, and a
+  TempFile class that generates a file-like object for temporary storage, etc.
 """
 
 
@@ -28,13 +27,12 @@
 import logging
 import tempfile
 
-import logging
 import tuf.hash
 import tuf.conf
 import tuf.formats
 
-# See 'log.py' to learn how logging is handled in TUF
+# See 'log.py' to learn how logging is handled in TUF.
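The new test cases above pin down the renamed helper's semantics: a filepath is accepted only when its immediate parent directory equals one of the confined directories, or when the empty string appears in the list. A hedged, self-contained sketch of those expectations, assuming tuf.util is importable:

```python
import tuf.util

in_confined = tuf.util.file_in_confined_directories

# The empty string means "no confinement": any filepath is acceptable.
assert in_confined('a/b/c.txt', [''])

# Exact parent-directory match: 'a/b/c.txt' lives directly in 'a/b/'.
assert in_confined('a/b/c.txt', ['a/b/'])

# Subdirectories of a confined directory do NOT match: 'a/b/c/1.txt'
# lives in 'a/b/c/', not 'a/b/'.
assert not in_confined('a/b/c/1.txt', ['a/b/'])

# Up-level references are normalized before comparison ('a/b/c/..' -> 'a/b').
assert in_confined('a/b/c/..', ['a/'])
```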
 logger = logging.getLogger('tuf.util')
 
 
@@ -45,7 +43,7 @@ class TempFile(object):
     cleaned up. This isn't a complete file-like object. The file functions
     that are supported make additional common-case safe assumptions. There
     are additional functions that aren't part of file-like objects. TempFile
-    is used in download.py module to temporarily store downloaded data whild
+    is used in the download.py module to temporarily store downloaded data while
     all security checks (file hashes/length) are performed.
   """
 
@@ -70,8 +68,7 @@ def __init__(self, prefix='tuf_temp_'):
         A string argument to be used with tempfile.TemporaryFile function.
 
     <Exceptions>
-      OSError on failure to load temp dir.
-      tuf.Error
+      tuf.Error on failure to load temp dir.
 
     <Returns>
       None.
 
@@ -121,10 +118,11 @@ def read(self, size=None):
         file is read and the file pointer is placed at the beginning of the
         file.
 
-      size: Number of bytes to be read.
+      size:
+        Number of bytes to be read.
 
     <Exceptions>
-      None
+      tuf.FormatError: if 'size' is invalid.
 
     <Returns>
       String of data.
 
@@ -246,7 +244,9 @@ def decompress_temp_file_object(self, compression):
         a file. Only gzip is allowed.
 
     <Exceptions>
-      tuf.Error
+      tuf.FormatError: If 'compression' is improperly formatted.
+
+      tuf.Error: If an invalid compression is given.
 
     <Side Effects>
       'self._orig_file' is used to store the original data of 'temporary_file'.
 
@@ -256,6 +256,10 @@ def decompress_temp_file_object(self, compression):
 
     """
 
+    # Does 'compression' have the correct format?
+    # Raise 'tuf.FormatError' if there is a mismatch.
+    tuf.formats.NAME_SCHEMA.check_match(compression)
+
     if self._orig_file is not None:
       raise tuf.Error('Can only set compression on a TempFile once.')
 
@@ -301,39 +305,40 @@ def close_temp_file(self):
 
 
 
-def get_file_details(file_path):
+def get_file_details(filepath):
   """
   <Purpose>
-    To get file's length and hash information. The hash is computed using
-    sha256 algorithm. This function is used in signerlib.py and updater.py
+    To get file's length and hash information. The hash is computed using the
+    sha256 algorithm. This function is used in the signerlib.py and updater.py
     modules.
 
   <Arguments>
-    file_path:
+    filepath:
       Absolute file path of a file.
 
   <Exceptions>
     tuf.FormatError: If hash of the file does not match HASHDICT_SCHEMA.
-    TODO: check non-existing path wich produces OSError.
+
+    tuf.Error: If 'filepath' does not exist.
 
   <Returns>
-    A tuple (length, hashes) describing file_path.
+    A tuple (length, hashes) describing 'filepath'.
 
   """
-  # Making sure that the format of 'file_path' is a path string.
-  # tuf.FormatError is raised on incorrect format.
-  tuf.formats.PATH_SCHEMA.check_match(file_path)
+  # Making sure that the format of 'filepath' is a path string.
+  # 'tuf.FormatError' is raised on incorrect format.
+  tuf.formats.PATH_SCHEMA.check_match(filepath)
 
   # Does the path exists?
-  if not os.path.exists(file_path):
-    raise tuf.Error, 'Path '+repr(file_path)+' doest not exist.'
-  file_path = os.path.abspath(file_path)
+  if not os.path.exists(filepath):
+    raise tuf.Error('Path '+repr(filepath)+' does not exist.')
+  filepath = os.path.abspath(filepath)
 
   # Obtaining length of the file.
-  file_length = os.path.getsize(file_path)
+  file_length = os.path.getsize(filepath)
 
   # Obtaining hash of the file.
-  digest_object = tuf.hash.digest_filename(file_path, algorithm='sha256')
+  digest_object = tuf.hash.digest_filename(filepath, algorithm='sha256')
   file_hash = {'sha256' : digest_object.hexdigest()}
 
   # Performing a format check to ensure 'file_hash' corresponds HASHDICT_SCHEMA.
@@ -359,17 +364,21 @@ def ensure_parent_dir(filename):
     filename:
       A path string.
 
+  <Exceptions>
+    tuf.FormatError: If 'filename' is improperly formatted.
+
   <Side Effects>
-    A directory is created whenever parent directory of 'filename' does not exist.
+    A directory is created whenever the parent directory of 'filename' does not
+    exist.
 
   <Returns>
     None.
 
   """
-  # Ensure 'name' corresponds to 'RELPATH_SCHEMA'.
+  # Ensure 'filename' corresponds to 'PATH_SCHEMA'.
   # Raise 'tuf.FormatError' on a mismatch.
-  tuf.formats.RELPATH_SCHEMA.check_match(filename)
+  tuf.formats.PATH_SCHEMA.check_match(filename)
 
   # Split 'filename' into head and tail, check if head exists.
   directory = os.path.split(filename)[0]
@@ -380,17 +389,19 @@
 
 
 
-def path_in_confined_paths(test_path, confined_paths):
+def file_in_confined_directories(filepath, confined_directories):
   """
   <Purpose>
-    Check whether 'test_path' is in the list/tuple of 'confined_paths'.
+    Check if the directory containing 'filepath' is in the list/tuple of
+    'confined_directories'.
 
   <Arguments>
-    test_path:
-      A string representing a path.
+    filepath:
+      A string representing the path of a file. The following example path
+      strings are viewed as files and not directories: 'a/b/c', 'a/b/c.txt'.
 
-    confined_paths:
-      A list or a tuple of path strings.
+    confined_directories:
+      A list, or a tuple, of directory strings.
 
   <Exceptions>
     tuf.FormatError: On incorrect format of the input.
@@ -401,23 +412,28 @@
 
   """
 
-  # Do the arguments are the correct format?
+  # Do the arguments have the correct format?
   # Raise 'tuf.FormatError' if there is a mismatch.
-  tuf.formats.RELPATH_SCHEMA.check_match(test_path)
-  tuf.formats.RELPATHS_SCHEMA.check_match(confined_paths)
+  tuf.formats.RELPATH_SCHEMA.check_match(filepath)
+  tuf.formats.RELPATHS_SCHEMA.check_match(confined_directories)
 
-  for pattern in confined_paths:
-    # Ignore slashes at the beginning.
-    pattern = pattern.lstrip('/')
-
-    # An empty string signifies the client should be confined to all
-    # directories and subdirectories. No need to check 'test_path'.
-    if pattern == '':
+  for confined_directory in confined_directories:
+    # The empty string (arbitrarily chosen) signifies the client is confined
+    # to all directories and subdirectories. No need to check 'filepath'.
+    if confined_directory == '':
       return True
 
-    # Get the directory name (i.e., strip off the file_path+extension)
-    directory_name = os.path.dirname(test_path)
-    if directory_name == os.path.dirname(pattern):
+    # Normalized paths needed, to account for up-level references, etc.
+    # TUF clients have the option of setting the list of directories in
+    # 'confined_directories'.
+    filepath = os.path.normpath(filepath)
+    confined_directory = os.path.normpath(confined_directory)
+
+    # A TUF client may restrict himself to specific directories on the
+    # remote repository. The list of paths in 'confined_directories', not
+    # including each path's subdirectories, are the only directories the
+    # client will download targets from.
+    if os.path.dirname(filepath) == confined_directory:
       return True
 
   return False
@@ -437,8 +453,10 @@ def import_json():
     None.
 
   <Exceptions>
-    ImportError on failure to import json or simplejson modules.
-    NameError
+    ImportError: on failure to import the json or simplejson modules.
+
+  <Side Effects>
+    None.
 
   <Returns>
     json/simplejson module
@@ -509,8 +527,14 @@ def load_json_string(data):
     data:
       A JSON string.
 
+  <Exceptions>
+    None.
+
+  <Side Effects>
+    None.
+
   <Returns>
     Deserialized object. For example a dictionary.
 
   """
@@ -528,22 +552,29 @@ def load_json_file(filepath):
     data:
       Absolute path of JSON file.
 
+  <Exceptions>
+    tuf.FormatError: If 'filepath' is improperly formatted.
+
+    tuf.Error: If 'filepath' could not be opened.
+
+  <Side Effects>
+    None.
+
   <Returns>
-    Deserialized object. For example a dictionary.
+    Deserialized object. For example, a dictionary.
 
   """
-  # Making sure that the format of 'file_path' is a path string.
+  # Making sure that the format of 'filepath' is a path string.
   # tuf.FormatError is raised on incorrect format.
   tuf.formats.PATH_SCHEMA.check_match(filepath)
 
   try:
-    fp = open(filepath)
+    fileobject = open(filepath)
   except IOError, err:
     raise tuf.Error(err)
 
   try:
-    return json.load(fp)
+    return json.load(fileobject)
   finally:
-    fp.close()
-
+    fileobject.close()
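To close, the load_json_file() cleanup above keeps the same behavior under clearer names. A small self-contained usage sketch, assuming tuf.util is importable; the file contents are placeholders:

```python
import json
import os
import tempfile

import tuf.util

# Write a small JSON file to load back.
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as fileobject:
  json.dump({'targets': ['a.txt']}, fileobject)
  filepath = fileobject.name

# load_json_file() validates 'filepath' against PATH_SCHEMA, wraps IOError
# in tuf.Error, and returns the deserialized object (a dict here).
print(tuf.util.load_json_file(filepath))

os.remove(filepath)
```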