Skip to content

Commit

Permalink
Fix client imports
Browse files Browse the repository at this point in the history
Fix imports to be vendoring-compatible.

Signed-off-by: Teodora Sechkova <tsechkova@vmware.com>
  • Loading branch information
sechkova committed Apr 8, 2021
1 parent 55c1f54 commit f959bc5
Show file tree
Hide file tree
Showing 5 changed files with 37 additions and 38 deletions.
20 changes: 9 additions & 11 deletions tuf/client_rework/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,10 @@
import timeit
import urllib

import securesystemslib
import securesystemslib.util
from securesystemslib import formats as sslib_formats

import tuf
import tuf.exceptions
import tuf.formats
from tuf import exceptions, formats

# See 'log.py' to learn how logging is handled in TUF.
logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -65,7 +63,7 @@ def download_file(url, required_length, fetcher, strict_required_length=True):
A file object is created on disk to store the contents of 'url'.
<Exceptions>
tuf.exceptions.DownloadLengthMismatchError, if there was a
exceptions.DownloadLengthMismatchError, if there was a
mismatch of observed vs expected lengths while downloading the file.
securesystemslib.exceptions.FormatError, if any of the arguments are
Expand All @@ -78,8 +76,8 @@ def download_file(url, required_length, fetcher, strict_required_length=True):
"""
# Do all of the arguments have the appropriate format?
# Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
securesystemslib.formats.URL_SCHEMA.check_match(url)
tuf.formats.LENGTH_SCHEMA.check_match(required_length)
sslib_formats.URL_SCHEMA.check_match(url)
formats.LENGTH_SCHEMA.check_match(required_length)

# 'url.replace('\\', '/')' is needed for compatibility with Windows-based
# systems, because they might use back-slashes in place of forward-slashes.
Expand Down Expand Up @@ -185,7 +183,7 @@ def _check_downloaded_length(
strict_required_length is True and total_downloaded is not equal
required_length.
tuf.exceptions.SlowRetrievalError, if the total downloaded was
exceptions.SlowRetrievalError, if the total downloaded was
done in less than the acceptable download speed (as set in
tuf.settings.py).
Expand Down Expand Up @@ -223,15 +221,15 @@ def _check_downloaded_length(
logger.debug(msg)

if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED:
raise tuf.exceptions.SlowRetrievalError(average_download_speed)
raise exceptions.SlowRetrievalError(average_download_speed)

msg = (
f"Good average download speed: "
f"{average_download_speed} bytes per second"
)
logger.debug(msg)

raise tuf.exceptions.DownloadLengthMismatchError(
raise exceptions.DownloadLengthMismatchError(
required_length, total_downloaded
)

Expand All @@ -241,7 +239,7 @@ def _check_downloaded_length(
# signed metadata; so, we must guess a reasonable required_length
# for it.
if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED:
raise tuf.exceptions.SlowRetrievalError(average_download_speed)
raise exceptions.SlowRetrievalError(average_download_speed)

msg = (
f"Good average download speed: "
Expand Down
8 changes: 4 additions & 4 deletions tuf/client_rework/metadata_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

from securesystemslib.keys import format_metadata_to_key

import tuf.exceptions
from tuf import exceptions, formats
from tuf.api import metadata


Expand Down Expand Up @@ -64,7 +64,7 @@ def verify(self, keys, threshold):
verified += 1

if verified < threshold:
raise tuf.exceptions.InsufficientKeysError
raise exceptions.InsufficientKeysError

def persist(self, filename):
"""
Expand All @@ -77,13 +77,13 @@ def expires(self, reference_time=None):
TODO
"""
if reference_time is None:
expires_timestamp = tuf.formats.datetime_to_unix_timestamp(
expires_timestamp = formats.datetime_to_unix_timestamp(
self._meta.signed.expires
)
reference_time = int(time.time())

if expires_timestamp < reference_time:
raise tuf.exceptions.ExpiredMetadataError
raise exceptions.ExpiredMetadataError


class RootWrapper(MetadataWrapper):
Expand Down
23 changes: 12 additions & 11 deletions tuf/client_rework/mirrors.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,12 @@
import urllib
from typing import BinaryIO, Dict, TextIO

import securesystemslib
from securesystemslib import exceptions as sslib_exceptions
from securesystemslib import formats as sslib_formats
from securesystemslib import util as sslib_util

import tuf
import tuf.client_rework.download as download
import tuf.formats
from tuf import exceptions, formats
from tuf.client_rework import download

# The type of file to be downloaded from a repository. The
# 'get_list_of_mirrors' function supports these file types.
Expand Down Expand Up @@ -78,13 +79,13 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
"""

# Checking if all the arguments have appropriate format.
tuf.formats.RELPATH_SCHEMA.check_match(file_path)
tuf.formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict)
securesystemslib.formats.NAME_SCHEMA.check_match(file_type)
formats.RELPATH_SCHEMA.check_match(file_path)
formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict)
sslib_formats.NAME_SCHEMA.check_match(file_type)

# Verify 'file_type' is supported.
if file_type not in _SUPPORTED_FILE_TYPES:
raise securesystemslib.exceptions.Error(
raise sslib_exceptions.Error(
"Invalid file_type argument."
" Supported file types: " + repr(_SUPPORTED_FILE_TYPES)
)
Expand All @@ -96,7 +97,7 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict):
# on a repository mirror when fetching target files. This field may be set
# by the client when the repository mirror is added to the
# 'tuf.client.updater.Updater' object.
in_confined_directory = securesystemslib.util.file_in_confined_directories
in_confined_directory = sslib_util.file_in_confined_directories

list_of_mirrors = []
for dummy, mirror_info in mirrors_dict.items():
Expand Down Expand Up @@ -160,7 +161,7 @@ def mirror_meta_download(

finally:
if file_mirror_errors:
raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors)
raise exceptions.NoWorkingMirrorError(file_mirror_errors)


def mirror_target_download(
Expand Down Expand Up @@ -192,4 +193,4 @@ def mirror_target_download(

finally:
if file_mirror_errors:
raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors)
raise exceptions.NoWorkingMirrorError(file_mirror_errors)
22 changes: 11 additions & 11 deletions tuf/client_rework/requests_fetcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@
import requests
import urllib3.exceptions

import tuf.exceptions
import tuf.settings
import tuf
from tuf import exceptions, settings
from tuf.client_rework.fetcher import FetcherInterface

# Globals
Expand Down Expand Up @@ -58,9 +58,9 @@ def fetch(self, url, required_length):
bytes.
Raises:
tuf.exceptions.SlowRetrievalError: A timeout occurs while receiving
exceptions.SlowRetrievalError: A timeout occurs while receiving
data.
tuf.exceptions.FetcherHTTPError: An HTTP error code is received.
exceptions.FetcherHTTPError: An HTTP error code is received.
Returns:
A bytes iterator
Expand All @@ -76,15 +76,15 @@ def fetch(self, url, required_length):
# - connect timeout (max delay before first byte is received)
# - read (gap) timeout (max delay between bytes received)
response = session.get(
url, stream=True, timeout=tuf.settings.SOCKET_TIMEOUT
url, stream=True, timeout=settings.SOCKET_TIMEOUT
)
# Check response status.
try:
response.raise_for_status()
except requests.HTTPError as e:
response.close()
status = e.response.status_code
raise tuf.exceptions.FetcherHTTPError(str(e), status)
raise exceptions.FetcherHTTPError(str(e), status)

# Define a generator function to be returned by fetch. This way the
# caller of fetch can differentiate between connection and actual data
Expand All @@ -99,11 +99,11 @@ def chunks():
# large file in one shot. Before beginning the round, sleep
# (if set) for a short amount of time so that the CPU is not
# hogged in the while loop.
if tuf.settings.SLEEP_BEFORE_ROUND:
time.sleep(tuf.settings.SLEEP_BEFORE_ROUND)
if settings.SLEEP_BEFORE_ROUND:
time.sleep(settings.SLEEP_BEFORE_ROUND)

read_amount = min(
tuf.settings.CHUNK_SIZE,
settings.CHUNK_SIZE,
required_length - bytes_received,
)

Expand Down Expand Up @@ -131,7 +131,7 @@ def chunks():
break

except urllib3.exceptions.ReadTimeoutError as e:
raise tuf.exceptions.SlowRetrievalError(str(e))
raise exceptions.SlowRetrievalError(str(e))

finally:
response.close()
Expand All @@ -147,7 +147,7 @@ def _get_session(self, url):
parsed_url = urllib.parse.urlparse(url)

if not parsed_url.scheme or not parsed_url.hostname:
raise tuf.exceptions.URLParsingError(
raise exceptions.URLParsingError(
"Could not get scheme and hostname from URL: " + url
)

Expand Down
2 changes: 1 addition & 1 deletion tuf/client_rework/updater_rework.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

from tuf import exceptions, settings
from tuf.client.fetcher import FetcherInterface
from tuf.client_rework import download, mirrors, requests_fetcher
from tuf.client_rework import mirrors, requests_fetcher

from .metadata_wrapper import (
RootWrapper,
Expand Down

0 comments on commit f959bc5

Please sign in to comment.