Merge pull request #1328 from avelichka/experimental-client
Fixes imports to be vendoring compatible
Jussi Kukkonen committed Apr 6, 2021
2 parents 2b9f838 + 58f9b4c commit ad30da8
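
Why vendoring compatibility matters here: vendoring tools (pip's vendoring machinery, for example) rewrite "from X import Y" statements to point into the vendored namespace, but plain "import x.y" statements bake the absolute package path into every call site, where it cannot be rewritten. A minimal before/after sketch of the pattern this commit adopts; the "pip._vendor" prefix is illustrative, not something this PR introduces:

    # Vendoring-hostile: the absolute package path is baked into the call site.
    import tuf.download
    print(tuf.download.unsafe_download)  # resolved through the absolute name

    # Vendoring-friendly: a rewriting tool only has to touch the import line
    # (e.g. "from pip._vendor.tuf import download"); call sites stay unchanged.
    from tuf import download
    print(download.unsafe_download)      # same object, bound to a local name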
Showing 1 changed file with 39 additions and 49 deletions.
tuf/client_rework/updater_rework.py (88 changes: 39 additions & 49 deletions)
@@ -12,16 +12,12 @@
import os
from typing import BinaryIO, Dict, Optional, TextIO

-import securesystemslib.exceptions
-import securesystemslib.util
-
-import tuf.download
-import tuf.exceptions
-import tuf.formats
-import tuf.mirrors
-import tuf.settings
+from securesystemslib import exceptions as sslib_exceptions
+from securesystemslib import hash as sslib_hash
+from securesystemslib import util as sslib_util
+
+from tuf import download, exceptions, mirrors, requests_fetcher, settings
 from tuf.client.fetcher import FetcherInterface
-from tuf.requests_fetcher import RequestsFetcher

from .metadata_wrapper import (
RootWrapper,
@@ -63,7 +59,7 @@ def __init__(
self._metadata = {}

if fetcher is None:
-            self._fetcher = RequestsFetcher()
+            self._fetcher = requests_fetcher.RequestsFetcher()
else:
self._fetcher = fetcher

@@ -126,13 +122,13 @@ def updated_targets(targets: Dict, destination_directory: str) -> Dict:
for algorithm, digest in target["fileinfo"]["hashes"].items():
digest_object = None
try:
-                    digest_object = securesystemslib.hash.digest_filename(
+                    digest_object = sslib_hash.digest_filename(
target_filepath, algorithm=algorithm
)

# This exception will occur if the target does not exist
# locally.
-                except securesystemslib.exceptions.StorageError:
+                except sslib_exceptions.StorageError:
updated_targets.append(target)
updated_targetpaths.append(target_filepath)
break
@@ -159,7 +155,7 @@ def download_target(self, target: Dict, destination_directory: str):
filepath = os.path.join(
destination_directory, target["filepath"]
)
-                securesystemslib.util.persist_temp_file(temp_obj, filepath)
+                sslib_util.persist_temp_file(temp_obj, filepath)
# pylint: disable=try-except-raise
except Exception:
# TODO: do something with exceptions
@@ -169,14 +165,14 @@ def _mirror_meta_download(self, filename: str, upper_length: int) -> TextIO:
"""
Download metadata file from the list of metadata mirrors
"""
-        file_mirrors = tuf.mirrors.get_list_of_mirrors(
+        file_mirrors = mirrors.get_list_of_mirrors(
"meta", filename, self._mirrors
)

file_mirror_errors = {}
for file_mirror in file_mirrors:
try:
-                temp_obj = tuf.download.unsafe_download(
+                temp_obj = download.unsafe_download(
file_mirror, upper_length, self._fetcher
)

@@ -189,23 +185,21 @@ def _mirror_meta_download(self, filename: str, upper_length: int) -> TextIO:

finally:
if file_mirror_errors:
-                raise tuf.exceptions.NoWorkingMirrorError(
-                    file_mirror_errors
-                )
+                raise exceptions.NoWorkingMirrorError(file_mirror_errors)

def _mirror_target_download(self, fileinfo: str) -> BinaryIO:
"""
Download target file from the list of target mirrors
"""
# full_filename = _get_full_name(filename)
-        file_mirrors = tuf.mirrors.get_list_of_mirrors(
+        file_mirrors = mirrors.get_list_of_mirrors(
"target", fileinfo["filepath"], self._mirrors
)

file_mirror_errors = {}
for file_mirror in file_mirrors:
try:
-                temp_obj = tuf.download.safe_download(
+                temp_obj = download.safe_download(
file_mirror, fileinfo["fileinfo"]["length"], self._fetcher
)

@@ -217,9 +211,7 @@ def _mirror_target_download(self, fileinfo: str) -> BinaryIO:

finally:
if file_mirror_errors:
-                raise tuf.exceptions.NoWorkingMirrorError(
-                    file_mirror_errors
-                )
+                raise exceptions.NoWorkingMirrorError(file_mirror_errors)

def _get_full_meta_name(
self, role: str, extension: str = ".json", version: int = None
@@ -233,7 +225,7 @@ def _get_full_meta_name(
else:
filename = str(version) + "." + role + extension
return os.path.join(
-            tuf.settings.repositories_directory,
+            settings.repositories_directory,
self._repository_name,
"metadata",
"current",
@@ -269,14 +261,14 @@ def _load_root(self) -> None:
# 1.1. Let N denote the version number of the trusted
# root metadata file.
lower_bound = self._metadata["root"].version
-        upper_bound = lower_bound + tuf.settings.MAX_NUMBER_ROOT_ROTATIONS
+        upper_bound = lower_bound + settings.MAX_NUMBER_ROOT_ROTATIONS

verified_root = None
for next_version in range(lower_bound, upper_bound):
try:
mirror_download = self._mirror_meta_download(
self._get_relative_meta_name("root", version=next_version),
-                    tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH,
+                    settings.DEFAULT_ROOT_REQUIRED_LENGTH,
)

for temp_obj in mirror_download:
@@ -286,7 +278,7 @@ def _load_root(self) -> None:
except Exception:
raise

-            except tuf.exceptions.NoWorkingMirrorError as exception:
+            except exceptions.NoWorkingMirrorError as exception:
for mirror_error in exception.mirror_errors.values():
if neither_403_nor_404(mirror_error):
temp_obj.close()
@@ -298,7 +290,7 @@ def _load_root(self) -> None:
# than the expiration timestamp in the trusted root metadata file
try:
verified_root.expires()
-        except tuf.exceptions.ExpiredMetadataError:
+        except exceptions.ExpiredMetadataError:
temp_obj.close() # pylint: disable=undefined-loop-variable

# 1.9. If the timestamp and / or snapshot keys have been rotated,
@@ -336,7 +328,7 @@ def _load_timestamp(self) -> None:
"""
# TODO Check if timestamp exists locally
for temp_obj in self._mirror_meta_download(
"timestamp.json", tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH
"timestamp.json", settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH
):
try:
verified_tampstamp = self._verify_timestamp(temp_obj)
@@ -362,7 +354,7 @@ def _load_snapshot(self) -> None:
try:
length = self._metadata["timestamp"].snapshot["length"]
except KeyError:
-            length = tuf.settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH
+            length = settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH

# Uncomment when implementing consistent_snapshot
# if self._consistent_snapshot:
@@ -397,7 +389,7 @@ def _load_targets(self, targets_role: str, parent_role: str) -> None:
try:
length = self._metadata["snapshot"].role(targets_role)["length"]
except KeyError:
-            length = tuf.settings.DEFAULT_TARGETS_REQUIRED_LENGTH
+            length = settings.DEFAULT_TARGETS_REQUIRED_LENGTH

# Uncomment when implementing consistent_snapshot
# if self._consistent_snapshot:
@@ -448,7 +440,7 @@ def _verify_root(self, temp_obj: TextIO) -> RootWrapper:
# Check for a rollback attack.
if intermediate_root.version < trusted_root.version:
temp_obj.close()
-            raise tuf.exceptions.ReplayedMetadataError(
+            raise exceptions.ReplayedMetadataError(
"root", intermediate_root.version(), trusted_root.version()
)
# Note that the expiration of the new (intermediate) root metadata
@@ -475,7 +467,7 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper:
<= self._metadata["timestamp"].version
):
temp_obj.close()
-            raise tuf.exceptions.ReplayedMetadataError(
+            raise exceptions.ReplayedMetadataError(
"root",
intermediate_timestamp.version(),
self._metadata["timestamp"].version(),
@@ -487,7 +479,7 @@ def _verify_timestamp(self, temp_obj: TextIO) -> TimestampWrapper:
<= self._metadata["timestamp"].snapshot["version"]
):
temp_obj.close()
-            raise tuf.exceptions.ReplayedMetadataError(
+            raise exceptions.ReplayedMetadataError(
"root",
intermediate_timestamp.snapshot.version(),
self._metadata["snapshot"].version(),
@@ -515,7 +507,7 @@ def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper:
!= self._metadata["timestamp"].snapshot["version"]
):
temp_obj.close()
-            raise tuf.exceptions.BadVersionNumberError
+            raise exceptions.BadVersionNumberError

# Check for an arbitrary software attack
trusted_root = self._metadata["root"]
@@ -531,7 +523,7 @@ def _verify_snapshot(self, temp_obj: TextIO) -> SnapshotWrapper:
!= self._metadata["snapshot"].meta[target_role]["version"]
):
temp_obj.close()
-                    raise tuf.exceptions.BadVersionNumberError
+                    raise exceptions.BadVersionNumberError

intermediate_snapshot.expires()

@@ -556,7 +548,7 @@ def _verify_targets(
!= self._metadata["snapshot"].role(filename)["version"]
):
temp_obj.close()
-            raise tuf.exceptions.BadVersionNumberError
+            raise exceptions.BadVersionNumberError

# Check for an arbitrary software attack
parent_role = self._metadata[parent_role]
@@ -586,12 +578,12 @@ def _preorder_depth_first_walk(self, target_filepath) -> Dict:
target = None
role_names = [("targets", "root")]
visited_role_names = set()
-        number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS
+        number_of_delegations = settings.MAX_NUMBER_OF_DELEGATIONS

# Ensure the client has the most up-to-date version of 'targets.json'.
-        # Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata
+        # Raise 'exceptions.NoWorkingMirrorError' if the changed metadata
# cannot be successfully downloaded and
-        # 'tuf.exceptions.RepositoryError' if the referenced metadata is
+        # 'exceptions.RepositoryError' if the referenced metadata is
# missing. Target methods such as this one are called after the
# top-level metadata have been refreshed (i.e., updater.refresh()).
# self._update_metadata_if_changed('targets')
@@ -684,7 +676,7 @@ def _preorder_depth_first_walk(self, target_filepath) -> Dict:
msg = (
f"{len(role_names)} roles left to visit, ",
"but allowed to visit at most ",
f"{tuf.settings.MAX_NUMBER_OF_DELEGATIONS}",
f"{settings.MAX_NUMBER_OF_DELEGATIONS}",
" delegations.",
)
logger.debug(msg)
@@ -780,7 +772,7 @@ def _visit_child_role(child_role: Dict, target_filepath: str) -> str:
# 'role_name' should have been validated when it was downloaded.
# The 'paths' or 'path_hash_prefixes' fields should not be missing,
# so we raise a format error here in case they are both missing.
-        raise tuf.exceptions.FormatError(
+        raise exceptions.FormatError(
repr(child_role_name) + " "
'has neither a "paths" nor "path_hash_prefixes". At least'
" one of these attributes must be present."
@@ -801,7 +793,7 @@ def _check_file_length(file_object, trusted_file_length):
# ensures that a downloaded file strictly matches a known, or trusted,
# file length.
if observed_length != trusted_file_length:
-        raise tuf.exceptions.DownloadLengthMismatchError(
+        raise exceptions.DownloadLengthMismatchError(
trusted_file_length, observed_length
)

@@ -813,7 +805,7 @@ def _check_hashes(file_object, trusted_hashes):
# Verify each trusted hash of 'trusted_hashes'. If all are valid, simply
# return.
for algorithm, trusted_hash in trusted_hashes.items():
-        digest_object = securesystemslib.hash.digest(algorithm)
+        digest_object = sslib_hash.digest(algorithm)
# Ensure we read from the beginning of the file object
# TODO: should we store file position (before the loop) and reset
# after we seek about?
@@ -823,9 +815,7 @@ def _check_hashes(file_object, trusted_hashes):

# Raise an exception if any of the hashes are incorrect.
if trusted_hash != computed_hash:
-            raise securesystemslib.exceptions.BadHashError(
-                trusted_hash, computed_hash
-            )
+            raise sslib_exceptions.BadHashError(trusted_hash, computed_hash)

logger.info(
"The file's " + algorithm + " hash is" " correct: " + trusted_hash
@@ -839,7 +829,7 @@ def _get_target_hash(target_filepath, hash_function="sha256"):
# Calculate the hash of the filepath to determine which bin to find the
# target. The client currently assumes the repository (i.e., repository
# tool) uses 'hash_function' to generate hashes and UTF-8.
-    digest_object = securesystemslib.hash.digest(hash_function)
+    digest_object = sslib_hash.digest(hash_function)
encoded_target_filepath = target_filepath.encode("utf-8")
digest_object.update(encoded_target_filepath)
target_filepath_hash = digest_object.hexdigest()
@@ -851,7 +841,7 @@ def neither_403_nor_404(mirror_error):
"""
TODO
"""
-    if isinstance(mirror_error, tuf.exceptions.FetcherHTTPError):
+    if isinstance(mirror_error, exceptions.FetcherHTTPError):
if mirror_error.status_code in {403, 404}:
return False
return True
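
For reference, a small usage sketch of the aliased securesystemslib imports introduced by this commit, mirroring what _check_hashes and _get_target_hash do above; the example filepath is illustrative:

    from securesystemslib import hash as sslib_hash

    # digest() returns a hashlib-style object with update() and hexdigest(),
    # which is exactly how the helpers above consume it.
    digest_object = sslib_hash.digest("sha256")
    digest_object.update("path/to/target.txt".encode("utf-8"))
    target_filepath_hash = digest_object.hexdigest()
    print(target_filepath_hash)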
