From b350746f4e885c547294aa5fa42c4a43055a67d5 Mon Sep 17 00:00:00 2001
From: Teodora Sechkova
Date: Thu, 15 Apr 2021 16:48:13 +0300
Subject: [PATCH 1/4] Remove mirror_*_download functions

The mirror_target_download and mirror_meta_download functions
that return iterators cause more trouble than they are worth.
Since each file needs to be downloaded and verified inside the
mirrors loop (before continuing to the next mirror), the correct
component to do this is the Updater itself. This means slightly
repetitive code inside the Updater class, but with the benefit of
better readability and a less bug-prone implementation.

Signed-off-by: Teodora Sechkova
---
 tuf/client_rework/mirrors.py        |  68 +-----
 tuf/client_rework/updater_rework.py | 309 +++++++++++++++++++---------
 2 files changed, 215 insertions(+), 162 deletions(-)

diff --git a/tuf/client_rework/mirrors.py b/tuf/client_rework/mirrors.py
index 97962e9eb7..83991e64e6 100644
--- a/tuf/client_rework/mirrors.py
+++ b/tuf/client_rework/mirrors.py
@@ -23,15 +23,13 @@
 """
 
 import os
-from typing import BinaryIO, Dict, TextIO
 from urllib import parse
 
 from securesystemslib import exceptions as sslib_exceptions
 from securesystemslib import formats as sslib_formats
 from securesystemslib import util as sslib_util
 
-from tuf import exceptions, formats
-from tuf.client_rework import download
+from tuf import formats
 
 # The type of file to be downloaded from a repository. The
 # 'get_list_of_mirrors' function supports these file types.
@@ -130,67 +128,3 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
         list_of_mirrors.append(url.replace("\\", "/"))
 
     return list_of_mirrors
-
-
-def mirror_meta_download(
-    filename: str,
-    upper_length: int,
-    mirrors_config: Dict,
-    fetcher: "FetcherInterface",
-) -> TextIO:
-    """
-    Download metadata file from the list of metadata mirrors
-    """
-    file_mirrors = get_list_of_mirrors("meta", filename, mirrors_config)
-
-    file_mirror_errors = {}
-    for file_mirror in file_mirrors:
-        try:
-            temp_obj = download.download_file(
-                file_mirror, upper_length, fetcher, strict_required_length=False
-            )
-
-            temp_obj.seek(0)
-            yield temp_obj
-
-        # pylint cannot figure out that we store the exceptions
-        # in a dictionary to raise them later so we disable
-        # the warning. This should be reviewed in the future still.
-        except Exception as exception:  # pylint: disable=broad-except
-            file_mirror_errors[file_mirror] = exception
-
-        finally:
-            if file_mirror_errors:
-                raise exceptions.NoWorkingMirrorError(file_mirror_errors)
-
-
-def mirror_target_download(
-    fileinfo: str, mirrors_config: Dict, fetcher: "FetcherInterface"
-) -> BinaryIO:
-    """
-    Download target file from the list of target mirrors
-    """
-    # full_filename = _get_full_name(filename)
-    file_mirrors = get_list_of_mirrors(
-        "target", fileinfo["filepath"], mirrors_config
-    )
-
-    file_mirror_errors = {}
-    for file_mirror in file_mirrors:
-        try:
-            temp_obj = download.download_file(
-                file_mirror, fileinfo["fileinfo"]["length"], fetcher
-            )
-
-            temp_obj.seek(0)
-            yield temp_obj
-
-        # pylint cannot figure out that we store the exceptions
-        # in a dictionary to raise them later so we disable
-        # the warning. This should be reviewed in the future still.
-        except Exception as exception:  # pylint: disable=broad-except
-            file_mirror_errors[file_mirror] = exception
-
-        finally:
-            if file_mirror_errors:
-                raise exceptions.NoWorkingMirrorError(file_mirror_errors)
diff --git a/tuf/client_rework/updater_rework.py b/tuf/client_rework/updater_rework.py
index 078f17304e..1049989c89 100644
--- a/tuf/client_rework/updater_rework.py
+++ b/tuf/client_rework/updater_rework.py
@@ -18,7 +18,7 @@
 
 from tuf import exceptions, settings
 from tuf.client.fetcher import FetcherInterface
-from tuf.client_rework import mirrors, requests_fetcher
+from tuf.client_rework import download, mirrors, requests_fetcher
 
 from .metadata_wrapper import (
     RootWrapper,
@@ -147,22 +147,41 @@ def download_target(self, target: Dict, destination_directory: str):
         This method performs the actual download of the specified target.
         The file is saved to the 'destination_directory' argument.
         """
-        try:
-            for temp_obj in mirrors.mirror_target_download(
-                target, self._mirrors, self._fetcher
-            ):
-                self._verify_target_file(temp_obj, target)
-                # break? should we break after first successful download?
+        temp_obj = None
+        file_mirror_errors = {}
+        file_mirrors = mirrors.get_list_of_mirrors(
+            "target", target["filepath"], self._mirrors
+        )
 
-                filepath = os.path.join(
-                    destination_directory, target["filepath"]
+        for file_mirror in file_mirrors:
+            try:
+                temp_obj = download.download_file(
+                    file_mirror, target["fileinfo"]["length"], self._fetcher
                 )
-                sslib_util.persist_temp_file(temp_obj, filepath)
-        # pylint: disable=try-except-raise
-        except Exception:
-            # TODO: do something with exceptions
-            raise
+
+                temp_obj.seek(0)
+                self._verify_target_file(temp_obj, target)
+                break
+
+            except Exception as exception:  # pylint: disable=broad-except
+                # Store the exceptions until all mirrors are iterated.
+                # If an exception is raised from one mirror but a valid
+                # file is found in the next one, the first exception is ignored.
+                file_mirror_errors[file_mirror] = exception
+
+                if temp_obj:
+                    temp_obj.close()
+                    temp_obj = None
+
+        # If all mirrors are iterated but a file object is not successfully
+        # downloaded and verified, raise the collected errors
+        if not temp_obj:
+            raise exceptions.NoWorkingMirrorError(file_mirror_errors)
+
+        filepath = os.path.join(destination_directory, target["filepath"])
+        sslib_util.persist_temp_file(temp_obj, filepath)
+        temp_obj.close()
 
     def _get_full_meta_name(
         self, role: str, extension: str = ".json", version: int = None
@@ -204,6 +223,7 @@ def _load_root(self) -> None:
         """
 
         # Load trusted root metadata
+        # TODO: this should happen much earlier, on Updater.__init__
         self._metadata["root"] = RootWrapper.from_json_file(
             self._get_full_meta_name("root")
         )
@@ -213,97 +233,164 @@ def _load_root(self) -> None:
         # root metadata file.
         lower_bound = self._metadata["root"].version
         upper_bound = lower_bound + settings.MAX_NUMBER_ROOT_ROTATIONS
+        intermediate_root = None
 
-        verified_root = None
         for next_version in range(lower_bound, upper_bound):
             try:
-                mirror_download = mirrors.mirror_meta_download(
+                root_mirrors = mirrors.get_list_of_mirrors(
+                    "meta",
                     self._get_relative_meta_name("root", version=next_version),
-                    settings.DEFAULT_ROOT_REQUIRED_LENGTH,
                     self._mirrors,
-                    self._fetcher,
                 )
 
-                for temp_obj in mirror_download:
-                    try:
-                        verified_root = self._verify_root(temp_obj)
-                    # pylint: disable=try-except-raise
-                    except Exception:
-                        raise
+                # For each version of root, iterate over the list of mirrors
+                # until an intermediate root is successfully downloaded and
+                # verified.
+ intermediate_root = self._root_mirrors_download(root_mirrors) + # Exit the loop when all mirrors have raised only 403 / 404 errors, + # which indicates that a bigger root version does not exist. except exceptions.NoWorkingMirrorError as exception: for mirror_error in exception.mirror_errors.values(): + # Otherwise, reraise the error, because it is not a simple + # HTTP error. if neither_403_nor_404(mirror_error): - temp_obj.close() + logger.info( + "Misc error for root version " + str(next_version) + ) raise + logger.debug("HTTP error for root version " + str(next_version)) + # If we are here, then we ran into only 403 / 404 errors, which + # are good reasons to suspect that the next root metadata file + # does not exist. break - # Check for a freeze attack. The latest known time MUST be lower - # than the expiration timestamp in the trusted root metadata file - try: - verified_root.expires() - except exceptions.ExpiredMetadataError: - temp_obj.close() # pylint: disable=undefined-loop-variable - - # 1.9. If the timestamp and / or snapshot keys have been rotated, - # then delete the trusted timestamp and snapshot metadata files. - if self._metadata["root"].keys("timestamp") != verified_root.keys( - "timestamp" - ): - # FIXME: use abstract storage - os.remove(self._get_full_meta_name("timestamp")) - self._metadata["timestamp"] = {} + # Continue only if a newer root version is found + if intermediate_root is not None: + # Check for a freeze attack. The latest known time MUST be lower + # than the expiration timestamp in the trusted root metadata file + # TODO define which exceptions are part of the public API + intermediate_root.expires() + + # 1.9. If the timestamp and / or snapshot keys have been rotated, + # then delete the trusted timestamp and snapshot metadata files. + if self._metadata["root"].keys( + "timestamp" + ) != intermediate_root.keys("timestamp"): + # FIXME: use abstract storage + os.remove(self._get_full_meta_name("timestamp")) + self._metadata["timestamp"] = {} + + if self._metadata["root"].keys( + "snapshot" + ) != intermediate_root.keys("snapshot"): + # FIXME: use abstract storage + os.remove(self._get_full_meta_name("snapshot")) + self._metadata["snapshot"] = {} + + # Set the trusted root metadata file to the new root + # metadata file + self._metadata["root"] = intermediate_root + # Persist root metadata. The client MUST write the file to + # non-volatile storage as FILENAME.EXT (e.g. root.json). + self._metadata["root"].persist(self._get_full_meta_name("root")) + + # 1.10. Set whether consistent snapshots are used as per + # the trusted root metadata file + self._consistent_snapshot = self._metadata[ + "root" + ].signed.consistent_snapshot + + def _root_mirrors_download(self, root_mirrors: Dict) -> "RootWrapper": + """Iterate over the list of "root_mirrors" until an intermediate + root is successfully downloaded and verified. 
+ Raise "NoWorkingMirrorError" if a root file cannot be downloaded or + verified from any mirror""" + + file_mirror_errors = {} + temp_obj = None + intermediate_root = None + + for root_mirror in root_mirrors: + try: + temp_obj = download.download_file( + root_mirror, + settings.DEFAULT_ROOT_REQUIRED_LENGTH, + self._fetcher, + strict_required_length=False, + ) - if self._metadata["root"].keys("snapshot") != verified_root.keys( - "snapshot" - ): - # FIXME: use abstract storage - os.remove(self._get_full_meta_name("snapshot")) - self._metadata["snapshot"] = {} + temp_obj.seek(0) + intermediate_root = self._verify_root(temp_obj) + # When we reach this point, a root file has been successfully + # downloaded and verified so we can exit the loop. + break - self._metadata["root"] = verified_root - # Persist root metadata. The client MUST write the file to non-volatile - # storage as FILENAME.EXT (e.g. root.json). - self._metadata["root"].persist(self._get_full_meta_name("root")) + # pylint cannot figure out that we store the exceptions + # in a dictionary to raise them later so we disable + # the warning. This should be reviewed in the future still. + except Exception as exception: # pylint: disable=broad-except + # Store the exceptions until all mirrors are iterated. + # If an exception is raised from one mirror but a valid + # file is found in the next one, the first exception is ignored. + file_mirror_errors[root_mirror] = exception + + finally: + if temp_obj: + temp_obj.close() + temp_obj = None - # 1.10. Set whether consistent snapshots are used as per - # the trusted root metadata file - self._consistent_snapshot = self._metadata[ - "root" - ].signed.consistent_snapshot + if not intermediate_root: + # If all mirrors are tried but a valid root file is not found, + # then raise an exception with the stored errors + raise exceptions.NoWorkingMirrorError(file_mirror_errors) - temp_obj.close() # pylint: disable=undefined-loop-variable + return intermediate_root def _load_timestamp(self) -> None: """ TODO """ # TODO Check if timestamp exists locally - for temp_obj in mirrors.mirror_meta_download( - "timestamp.json", - settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH, - self._mirrors, - self._fetcher, - ): + file_mirrors = mirrors.get_list_of_mirrors( + "meta", "timestamp.json", self._mirrors + ) + + file_mirror_errors = {} + verified_timestamp = None + for file_mirror in file_mirrors: try: - verified_tampstamp = self._verify_timestamp(temp_obj) - # break? should we break after first successful download? - except Exception: - # TODO: do something with exceptions - temp_obj.close() - raise + temp_obj = download.download_file( + file_mirror, + settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH, + self._fetcher, + strict_required_length=False, + ) - self._metadata["timestamp"] = verified_tampstamp + temp_obj.seek(0) + verified_timestamp = self._verify_timestamp(temp_obj) + break + + except Exception as exception: # pylint: disable=broad-except + file_mirror_errors[file_mirror] = exception + + finally: + if temp_obj: + temp_obj.close() + temp_obj = None + + if not verified_timestamp: + raise exceptions.NoWorkingMirrorError(file_mirror_errors) + + self._metadata["timestamp"] = verified_timestamp # Persist root metadata. The client MUST write the file to # non-volatile storage as FILENAME.EXT (e.g. root.json). 
self._metadata["timestamp"].persist( self._get_full_meta_name("timestamp.json") ) - temp_obj.close() # pylint: disable=undefined-loop-variable - def _load_snapshot(self) -> None: """ TODO @@ -319,19 +406,37 @@ def _load_snapshot(self) -> None: # else: # version = None - # Check if exists locally - # self.loadLocal('snapshot', snapshotVerifier) - for temp_obj in mirrors.mirror_meta_download( - "snapshot.json", length, self._mirrors, self._fetcher - ): + # TODO: Check if exists locally + file_mirrors = mirrors.get_list_of_mirrors( + "meta", "snapshot.json", self._mirrors + ) + + file_mirror_errors = {} + verified_snapshot = False + for file_mirror in file_mirrors: try: + temp_obj = download.download_file( + file_mirror, + length, + self._fetcher, + strict_required_length=False, + ) + + temp_obj.seek(0) verified_snapshot = self._verify_snapshot(temp_obj) - # break? should we break after first successful download? - except Exception: - # TODO: do something with exceptions - temp_obj.close() - raise + break + + except Exception as exception: # pylint: disable=broad-except + file_mirror_errors[file_mirror] = exception + + finally: + if temp_obj: + temp_obj.close() + temp_obj = None + + if not verified_snapshot: + raise exceptions.NoWorkingMirrorError(file_mirror_errors) self._metadata["snapshot"] = verified_snapshot # Persist root metadata. The client MUST write the file to @@ -340,8 +445,6 @@ def _load_snapshot(self) -> None: self._get_full_meta_name("snapshot.json") ) - temp_obj.close() # pylint: disable=undefined-loop-variable - def _load_targets(self, targets_role: str, parent_role: str) -> None: """ TODO @@ -357,22 +460,40 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None: # else: # version = None - # Check if exists locally - # self.loadLocal('snapshot', targetsVerifier) + # TODO: Check if exists locally - for temp_obj in mirrors.mirror_meta_download( - targets_role + ".json", length, self._mirrors, self._fetcher - ): + file_mirrors = mirrors.get_list_of_mirrors( + "meta", f"{targets_role}.json", self._mirrors + ) + file_mirror_errors = {} + verified_targets = False + for file_mirror in file_mirrors: try: + temp_obj = download.download_file( + file_mirror, + length, + self._fetcher, + strict_required_length=False, + ) + + temp_obj.seek(0) verified_targets = self._verify_targets( temp_obj, targets_role, parent_role ) - # break? should we break after first successful download? - except Exception: - # TODO: do something with exceptions - temp_obj.close() - raise + break + + except Exception as exception: # pylint: disable=broad-except + file_mirror_errors[file_mirror] = exception + + finally: + if temp_obj: + temp_obj.close() + temp_obj = None + + if not verified_targets: + raise exceptions.NoWorkingMirrorError(file_mirror_errors) + self._metadata[targets_role] = verified_targets # Persist root metadata. The client MUST write the file to # non-volatile storage as FILENAME.EXT (e.g. root.json). @@ -380,8 +501,6 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None: self._get_full_meta_name(targets_role, extension=".json") ) - temp_obj.close() # pylint: disable=undefined-loop-variable - def _verify_root(self, temp_obj: TextIO) -> RootWrapper: """ TODO From 482ea1cc86f18f3624939b1dc0883916eb47739e Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 15 Apr 2021 16:59:27 +0300 Subject: [PATCH 2/4] Remove _get_relative_meta_name Current implementation does not bring much benefit and can be replaced with a simple f-string. 
Signed-off-by: Teodora Sechkova
---
 tuf/client_rework/updater_rework.py | 16 +---------------
 1 file changed, 1 insertion(+), 15 deletions(-)

diff --git a/tuf/client_rework/updater_rework.py b/tuf/client_rework/updater_rework.py
index 1049989c89..a6a171bbcb 100644
--- a/tuf/client_rework/updater_rework.py
+++ b/tuf/client_rework/updater_rework.py
@@ -202,20 +202,6 @@ def _get_full_meta_name(
             filename,
         )
 
-    @staticmethod
-    def _get_relative_meta_name(
-        role: str, extension: str = ".json", version: int = None
-    ) -> str:
-        """
-        Helper method returning full metadata file path given the role name
-        and file extension.
-        """
-        if version is None:
-            filename = role + extension
-        else:
-            filename = str(version) + "." + role + extension
-        return filename
-
     def _load_root(self) -> None:
         """
         If metadata file for 'root' role does not exist locally, download it
@@ -239,7 +225,7 @@ def _load_root(self) -> None:
             try:
                 root_mirrors = mirrors.get_list_of_mirrors(
                     "meta",
-                    self._get_relative_meta_name("root", version=next_version),
+                    f"{next_version}.root.json",
                     self._mirrors,
                 )
 
From e885ae4a47e8c00c40a049a6bccafed3ce016535 Mon Sep 17 00:00:00 2001
From: Teodora Sechkova
Date: Thu, 15 Apr 2021 17:31:30 +0300
Subject: [PATCH 3/4] Replace file objects used as function arguments

By replacing file objects passed as function arguments with the
file content read from them, we simplify the life-cycle management
of temporary file objects: temporary files are now handled in a
single function. This is done for metadata files, which are fully
read into memory right after download anyway. The same is not true
for target files, which should preferably be processed in chunks,
so target download and verification still deal with file objects.

_check_hashes is split into two functions: one operating on file
objects and one operating directly on file content.
Signed-off-by: Teodora Sechkova --- tuf/client_rework/metadata_wrapper.py | 3 +- tuf/client_rework/updater_rework.py | 76 +++++++++++++-------------- 2 files changed, 39 insertions(+), 40 deletions(-) diff --git a/tuf/client_rework/metadata_wrapper.py b/tuf/client_rework/metadata_wrapper.py index 18f0d6d9aa..b1dd2d1d58 100644 --- a/tuf/client_rework/metadata_wrapper.py +++ b/tuf/client_rework/metadata_wrapper.py @@ -22,9 +22,8 @@ def __init__(self, meta): self._meta = meta @classmethod - def from_json_object(cls, tmp_file): + def from_json_object(cls, raw_data): """Loads JSON-formatted TUF metadata from a file object.""" - raw_data = tmp_file.read() # Use local scope import to avoid circular import errors # pylint: disable=import-outside-toplevel from tuf.api.serialization.json import JSONDeserializer diff --git a/tuf/client_rework/updater_rework.py b/tuf/client_rework/updater_rework.py index a6a171bbcb..435a21485f 100644 --- a/tuf/client_rework/updater_rework.py +++ b/tuf/client_rework/updater_rework.py @@ -10,7 +10,7 @@ import fnmatch import logging import os -from typing import BinaryIO, Dict, Optional, TextIO +from typing import Dict, Optional from securesystemslib import exceptions as sslib_exceptions from securesystemslib import hash as sslib_hash @@ -159,9 +159,9 @@ def download_target(self, target: Dict, destination_directory: str): temp_obj = download.download_file( file_mirror, target["fileinfo"]["length"], self._fetcher ) - + _check_file_length(temp_obj, target["fileinfo"]["length"]) temp_obj.seek(0) - self._verify_target_file(temp_obj, target) + _check_hashes_obj(temp_obj, target["fileinfo"]["hashes"]) break except Exception as exception: # pylint: disable=broad-except @@ -308,7 +308,7 @@ def _root_mirrors_download(self, root_mirrors: Dict) -> "RootWrapper": ) temp_obj.seek(0) - intermediate_root = self._verify_root(temp_obj) + intermediate_root = self._verify_root(temp_obj.read()) # When we reach this point, a root file has been successfully # downloaded and verified so we can exit the loop. break @@ -356,7 +356,7 @@ def _load_timestamp(self) -> None: ) temp_obj.seek(0) - verified_timestamp = self._verify_timestamp(temp_obj) + verified_timestamp = self._verify_timestamp(temp_obj.read()) break except Exception as exception: # pylint: disable=broad-except @@ -410,7 +410,7 @@ def _load_snapshot(self) -> None: ) temp_obj.seek(0) - verified_snapshot = self._verify_snapshot(temp_obj) + verified_snapshot = self._verify_snapshot(temp_obj.read()) break except Exception as exception: # pylint: disable=broad-except @@ -465,7 +465,7 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None: temp_obj.seek(0) verified_targets = self._verify_targets( - temp_obj, targets_role, parent_role + temp_obj.read(), targets_role, parent_role ) break @@ -487,12 +487,12 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None: self._get_full_meta_name(targets_role, extension=".json") ) - def _verify_root(self, temp_obj: TextIO) -> RootWrapper: + def _verify_root(self, file_content: bytes) -> RootWrapper: """ TODO """ - intermediate_root = RootWrapper.from_json_object(temp_obj) + intermediate_root = RootWrapper.from_json_object(file_content) # Check for an arbitrary software attack trusted_root = self._metadata["root"] @@ -505,7 +505,6 @@ def _verify_root(self, temp_obj: TextIO) -> RootWrapper: # Check for a rollback attack. 
if intermediate_root.version < trusted_root.version: - temp_obj.close() raise exceptions.ReplayedMetadataError( "root", intermediate_root.version(), trusted_root.version() ) @@ -514,11 +513,11 @@ def _verify_root(self, temp_obj: TextIO) -> RootWrapper: return intermediate_root - def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper: + def _verify_timestamp(self, file_content: bytes) -> TimestampWrapper: """ TODO """ - intermediate_timestamp = TimestampWrapper.from_json_object(temp_obj) + intermediate_timestamp = TimestampWrapper.from_json_object(file_content) # Check for an arbitrary software attack trusted_root = self._metadata["root"] @@ -532,7 +531,6 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper: intermediate_timestamp.signed.version <= self._metadata["timestamp"].version ): - temp_obj.close() raise exceptions.ReplayedMetadataError( "root", intermediate_timestamp.version(), @@ -544,7 +542,6 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper: intermediate_timestamp.snapshot.version <= self._metadata["timestamp"].snapshot["version"] ): - temp_obj.close() raise exceptions.ReplayedMetadataError( "root", intermediate_timestamp.snapshot.version(), @@ -555,7 +552,7 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper: return intermediate_timestamp - def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper: + def _verify_snapshot(self, file_content: bytes) -> SnapshotWrapper: """ TODO """ @@ -563,16 +560,15 @@ def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper: # Check against timestamp metadata if self._metadata["timestamp"].snapshot.get("hash"): _check_hashes( - temp_obj, self._metadata["timestamp"].snapshot.get("hash") + file_content, self._metadata["timestamp"].snapshot.get("hash") ) - intermediate_snapshot = SnapshotWrapper.from_json_object(temp_obj) + intermediate_snapshot = SnapshotWrapper.from_json_object(file_content) if ( intermediate_snapshot.version != self._metadata["timestamp"].snapshot["version"] ): - temp_obj.close() raise exceptions.BadVersionNumberError # Check for an arbitrary software attack @@ -588,7 +584,6 @@ def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper: target_role["version"] != self._metadata["snapshot"].meta[target_role]["version"] ): - temp_obj.close() raise exceptions.BadVersionNumberError intermediate_snapshot.expires() @@ -596,7 +591,7 @@ def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper: return intermediate_snapshot def _verify_targets( - self, temp_obj: TextIO, filename: str, parent_role: str + self, file_content: bytes, filename: str, parent_role: str ) -> TargetsWrapper: """ TODO @@ -605,15 +600,14 @@ def _verify_targets( # Check against timestamp metadata if self._metadata["snapshot"].role(filename).get("hash"): _check_hashes( - temp_obj, self._metadata["snapshot"].targets.get("hash") + file_content, self._metadata["snapshot"].targets.get("hash") ) - intermediate_targets = TargetsWrapper.from_json_object(temp_obj) + intermediate_targets = TargetsWrapper.from_json_object(file_content) if ( intermediate_targets.version != self._metadata["snapshot"].role(filename)["version"] ): - temp_obj.close() raise exceptions.BadVersionNumberError # Check for an arbitrary software attack @@ -627,15 +621,6 @@ def _verify_targets( return intermediate_targets - @staticmethod - def _verify_target_file(temp_obj: BinaryIO, targetinfo: Dict) -> None: - """ - TODO - """ - - _check_file_length(temp_obj, targetinfo["fileinfo"]["length"]) - _check_hashes(temp_obj, 
targetinfo["fileinfo"]["hashes"]) - def _preorder_depth_first_walk(self, target_filepath) -> Dict: """ TODO @@ -864,7 +849,25 @@ def _check_file_length(file_object, trusted_file_length): ) -def _check_hashes(file_object, trusted_hashes): +def _check_hashes_obj(file_object, trusted_hashes): + """ + TODO + """ + for algorithm, trusted_hash in trusted_hashes.items(): + digest_object = sslib_hash.digest_fileobject(file_object, algorithm) + + computed_hash = digest_object.hexdigest() + + # Raise an exception if any of the hashes are incorrect. + if trusted_hash != computed_hash: + raise sslib_exceptions.BadHashError(trusted_hash, computed_hash) + + logger.info( + "The file's " + algorithm + " hash is" " correct: " + trusted_hash + ) + + +def _check_hashes(file_content, trusted_hashes): """ TODO """ @@ -872,11 +875,8 @@ def _check_hashes(file_object, trusted_hashes): # return. for algorithm, trusted_hash in trusted_hashes.items(): digest_object = sslib_hash.digest(algorithm) - # Ensure we read from the beginning of the file object - # TODO: should we store file position (before the loop) and reset - # after we seek about? - file_object.seek(0) - digest_object.update(file_object.read()) + + digest_object.update(file_content) computed_hash = digest_object.hexdigest() # Raise an exception if any of the hashes are incorrect. From dd110988b257a1be7f30f14ceb9794d6466ef0e6 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Tue, 27 Apr 2021 14:07:35 +0300 Subject: [PATCH 4/4] Replace sslib exceptions Replace sslib exceptions raised in Updater with the corresponding ones defined in tuf. Signed-off-by: Teodora Sechkova --- tuf/client_rework/updater_rework.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tuf/client_rework/updater_rework.py b/tuf/client_rework/updater_rework.py index 435a21485f..c6872c6478 100644 --- a/tuf/client_rework/updater_rework.py +++ b/tuf/client_rework/updater_rework.py @@ -860,7 +860,7 @@ def _check_hashes_obj(file_object, trusted_hashes): # Raise an exception if any of the hashes are incorrect. if trusted_hash != computed_hash: - raise sslib_exceptions.BadHashError(trusted_hash, computed_hash) + raise exceptions.BadHashError(trusted_hash, computed_hash) logger.info( "The file's " + algorithm + " hash is" " correct: " + trusted_hash @@ -881,7 +881,7 @@ def _check_hashes(file_content, trusted_hashes): # Raise an exception if any of the hashes are incorrect. if trusted_hash != computed_hash: - raise sslib_exceptions.BadHashError(trusted_hash, computed_hash) + raise exceptions.BadHashError(trusted_hash, computed_hash) logger.info( "The file's " + algorithm + " hash is" " correct: " + trusted_hash
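
The download-and-verify loop that patch 1 inlines into the Updater has the
same shape for every metadata type: try one mirror, record the error on
failure, stop at the first file that both downloads and verifies, and raise
the accumulated errors only once every mirror has failed. Below is a minimal,
self-contained sketch of that pattern for the metadata flow. It is
illustrative, not code from the series: the helper name download_and_verify
and the local NoWorkingMirrorError class are stand-ins for the Updater
methods and tuf.exceptions.NoWorkingMirrorError.

from typing import BinaryIO, Callable, Dict, List, Optional


class NoWorkingMirrorError(Exception):
    """Local stand-in for tuf.exceptions.NoWorkingMirrorError."""

    def __init__(self, mirror_errors: Dict[str, Exception]) -> None:
        super().__init__(f"no mirror succeeded: {mirror_errors!r}")
        self.mirror_errors = mirror_errors


def download_and_verify(
    mirror_urls: List[str],
    download: Callable[[str], BinaryIO],
    verify: Callable[[bytes], object],
) -> object:
    """Try each mirror in turn and return the first verified result.

    Errors are collected per mirror and raised together only after
    every mirror has failed, so one bad mirror cannot mask a good one.
    """
    # Bind temp_obj before the loop: the finally clause below runs even
    # when the very first download attempt raises, so the name must
    # already exist at that point.
    temp_obj: Optional[BinaryIO] = None
    verified: Optional[object] = None
    mirror_errors: Dict[str, Exception] = {}

    for url in mirror_urls:
        try:
            temp_obj = download(url)
            temp_obj.seek(0)
            verified = verify(temp_obj.read())
            break  # first mirror that downloads and verifies wins
        except Exception as exception:  # pylint: disable=broad-except
            # Remember the failure; a later mirror may still succeed.
            mirror_errors[url] = exception
        finally:
            if temp_obj:
                temp_obj.close()
                temp_obj = None

    if verified is None:
        raise NoWorkingMirrorError(mirror_errors)
    return verified

For example, with an in-memory fetcher standing in for the network:

import io

data = b'{"signed": {}, "signatures": []}'
result = download_and_verify(
    ["https://mirror-a.example/2.root.json"],
    download=lambda url: io.BytesIO(data),
    verify=lambda raw: raw,  # a real client deserializes and checks signatures
)
assert result == data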
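
Patch 3 keeps two hash-checking shapes on purpose: metadata is small and is
already read fully into memory, while targets may be large and are better
hashed straight from the file object. The sketch below illustrates that split
using hashlib from the standard library; the names check_hashes_obj and
check_hashes mirror the patch, but the chunk size and the error type are
assumptions for the example (the real code uses securesystemslib's digest
helpers and raises BadHashError).

import hashlib
import io
from typing import BinaryIO, Dict


def check_hashes_obj(
    file_object: BinaryIO, trusted_hashes: Dict[str, str]
) -> None:
    """Hash a file object in fixed-size chunks - suits large targets."""
    for algorithm, trusted_hash in trusted_hashes.items():
        digest = hashlib.new(algorithm)
        file_object.seek(0)
        for chunk in iter(lambda: file_object.read(4096), b""):
            digest.update(chunk)
        if digest.hexdigest() != trusted_hash:
            raise ValueError(f"bad {algorithm} hash")


def check_hashes(file_content: bytes, trusted_hashes: Dict[str, str]) -> None:
    """Hash in-memory content directly - suits metadata, which is read
    fully into memory right after download anyway."""
    for algorithm, trusted_hash in trusted_hashes.items():
        if hashlib.new(algorithm, file_content).hexdigest() != trusted_hash:
            raise ValueError(f"bad {algorithm} hash")


data = b"example target content"
expected = {"sha256": hashlib.sha256(data).hexdigest()}
check_hashes(data, expected)  # content already in memory (metadata path)
check_hashes_obj(io.BytesIO(data), expected)  # file object, chunked (targets)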