Commit 1c99dc4

Add unsafe changes

nateprewitt committed Jun 17, 2024
1 parent 425c904 commit 1c99dc4

Showing 28 changed files with 61 additions and 67 deletions.
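
This commit mechanically rewrites printf-style % interpolation as f-strings throughout the package. For the cases touched here the rewrite is output-preserving; a minimal sketch of the before/after pattern, mirroring the constants.py hunk below:

    import s3transfer

    # Before: printf-style interpolation
    user_agent_old = 's3transfer/%s' % s3transfer.__version__
    # After: the equivalent f-string
    user_agent_new = f's3transfer/{s3transfer.__version__}'
    assert user_agent_old == user_agent_new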
4 changes: 2 additions & 2 deletions s3transfer/__init__.py

@@ -814,8 +814,8 @@ def _validate_all_known_args(self, actual, allowed):
         for kwarg in actual:
             if kwarg not in allowed:
                 raise ValueError(
-                    "Invalid extra_args key '%s', "
-                    "must be one of: %s" % (kwarg, ', '.join(allowed))
+                    f"Invalid extra_args key '{kwarg}', "
+                    f"must be one of: {', '.join(allowed)}"
                 )
 
     def _ranged_download(
4 changes: 2 additions & 2 deletions s3transfer/constants.py

@@ -26,5 +26,5 @@
     'ExpectedBucketOwner',
 ]
 
-USER_AGENT = 's3transfer/%s' % s3transfer.__version__
-PROCESS_USER_AGENT = '%s processpool' % USER_AGENT
+USER_AGENT = f's3transfer/{s3transfer.__version__}'
+PROCESS_USER_AGENT = f'{USER_AGENT} processpool'
2 changes: 1 addition & 1 deletion s3transfer/copies.py

@@ -280,7 +280,7 @@ def _get_head_object_request_from_copy_source(self, copy_source):
             raise TypeError(
                 'Expecting dictionary formatted: '
                 '{"Bucket": bucket_name, "Key": key} '
-                'but got %s or type %s.' % (copy_source, type(copy_source))
+                f'but got {copy_source} or type {type(copy_source)}.'
             )
 
     def _extra_upload_part_args(self, extra_args):
6 changes: 3 additions & 3 deletions s3transfer/futures.py

@@ -293,8 +293,8 @@ def _transition_to_non_done_state(self, desired_state):
         with self._lock:
             if self.done():
                 raise RuntimeError(
-                    'Unable to transition from done state %s to non-done '
-                    'state %s.' % (self.status, desired_state)
+                    f'Unable to transition from done state {self.status} to non-done '
+                    f'state {desired_state}.'
                 )
             self._status = desired_state

@@ -396,7 +396,7 @@ def _run_callback(self, callback):
         # We do not want a callback interrupting the process, especially
         # in the failure cleanups. So log and catch, the exception.
         except Exception:
-            logger.debug("Exception raised in %s." % callback, exc_info=True)
+            logger.debug(f"Exception raised in {callback}.", exc_info=True)
 
 
 class BoundedExecutor:
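
A side note on the futures.py hunk above: the commit only converts eagerly formatted log messages. Calls that pass lazy %-style arguments to the logger (for example, logger.debug("Acquiring %s", tag) in utils.py below) are left untouched, since there the interpolation is deferred until the record is actually emitted. A minimal sketch of the distinction, with hypothetical values:

    import logging

    logger = logging.getLogger(__name__)
    tag = 'transfer-1'  # hypothetical value for illustration

    # Lazy: logging interpolates only if DEBUG is enabled, so this
    # form stays %-style after the commit.
    logger.debug("Acquiring %s", tag)

    # Eager: the message string is built before debug() is called,
    # so rewriting it as an f-string changes nothing observable.
    logger.debug(f"Acquiring {tag}")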
12 changes: 6 additions & 6 deletions s3transfer/manager.py

@@ -149,8 +149,8 @@ def _validate_attrs_are_nonzero(self):
         for attr, attr_val in self.__dict__.items():
             if attr_val is not None and attr_val <= 0:
                 raise ValueError(
-                    'Provided parameter %s of value %s must be greater than '
-                    '0.' % (attr, attr_val)
+                    f'Provided parameter {attr} of value {attr_val} must '
+                    'be greater than 0.'
                 )

@@ -492,16 +492,16 @@ def _validate_if_bucket_supported(self, bucket):
                 match = pattern.match(bucket)
                 if match:
                     raise ValueError(
-                        'TransferManager methods do not support %s '
-                        'resource. Use direct client calls instead.' % resource
+                        f'TransferManager methods do not support {resource} '
+                        'resource. Use direct client calls instead.'
                     )
 
     def _validate_all_known_args(self, actual, allowed):
         for kwarg in actual:
             if kwarg not in allowed:
                 raise ValueError(
-                    "Invalid extra_args key '%s', "
-                    "must be one of: %s" % (kwarg, ', '.join(allowed))
+                    "Invalid extra_args key '{}', "
+                    "must be one of: {}".format(kwarg, ', '.join(allowed))
                 )
 
     def _add_operation_defaults(self, bucket, extra_args):
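
One conversion in the manager.py hunk above uses str.format rather than an f-string. Adjacent string literals are concatenated at compile time, so .format() applies to the combined template rather than only the second literal, and both placeholders are filled. A small sketch with hypothetical values:

    kwarg = 'BadKey'              # hypothetical values for illustration
    allowed = ['ACL', 'ContentType']

    # The two literals join into one template before .format() runs.
    msg = (
        "Invalid extra_args key '{}', "
        "must be one of: {}".format(kwarg, ', '.join(allowed))
    )
    assert msg == "Invalid extra_args key 'BadKey', must be one of: ACL, ContentType"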
7 changes: 3 additions & 4 deletions s3transfer/subscribers.py

@@ -36,14 +36,13 @@ def _validate_subscriber_methods(cls):
             subscriber_method = getattr(cls, 'on_' + subscriber_type)
             if not callable(subscriber_method):
                 raise InvalidSubscriberMethodError(
-                    'Subscriber method %s must be callable.'
-                    % subscriber_method
+                    f'Subscriber method {subscriber_method} must be callable.'
                 )
 
             if not accepts_kwargs(subscriber_method):
                 raise InvalidSubscriberMethodError(
-                    'Subscriber method %s must accept keyword '
-                    'arguments (**kwargs)' % subscriber_method
+                    f'Subscriber method {subscriber_method} must accept keyword '
+                    'arguments (**kwargs)'
                 )
 
     def on_queued(self, future, **kwargs):
5 changes: 1 addition & 4 deletions s3transfer/tasks.py

@@ -96,10 +96,7 @@ def __repr__(self):
         main_kwargs_to_display = self._get_kwargs_with_params_to_include(
             self._main_kwargs, params_to_display
         )
-        return (
-            f'{self.__class__.__name__}(transfer_id='
-            f'{self._transfer_coordinator.transfer_id}, {main_kwargs_to_display})'
-        )
+        return f'{self.__class__.__name__}(transfer_id={self._transfer_coordinator.transfer_id}, {main_kwargs_to_display})'
 
     @property
     def transfer_id(self):
15 changes: 7 additions & 8 deletions s3transfer/utils.py

@@ -634,7 +634,7 @@ def acquire(self, tag, blocking=True):
         """
         logger.debug("Acquiring %s", tag)
         if not self._semaphore.acquire(blocking):
-            raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
+            raise NoResourcesAvailable(f"Cannot acquire tag '{tag}'")
 
     def release(self, tag, acquire_token):
         """Release the semaphore

@@ -692,7 +692,7 @@ def acquire(self, tag, blocking=True):
         try:
             if self._count == 0:
                 if not blocking:
-                    raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
+                    raise NoResourcesAvailable(f"Cannot acquire tag '{tag}'")
                 else:
                     while self._count == 0:
                         self._condition.wait()

@@ -714,7 +714,7 @@ def release(self, tag, acquire_token):
         self._condition.acquire()
         try:
             if tag not in self._tag_sequences:
-                raise ValueError("Attempted to release unknown tag: %s" % tag)
+                raise ValueError(f"Attempted to release unknown tag: {tag}")
             max_sequence = self._tag_sequences[tag]
             if self._lowest_sequence[tag] == sequence_number:
                 # We can immediately process this request and free up

@@ -741,7 +741,7 @@ def release(self, tag, acquire_token):
             else:
                 raise ValueError(
                     "Attempted to release unknown sequence number "
-                    "%s for tag: %s" % (sequence_number, tag)
+                    f"{sequence_number} for tag: {tag}"
                 )
         finally:
             self._condition.release()

@@ -779,13 +779,13 @@ def _adjust_for_chunksize_limits(self, current_chunksize):
         if current_chunksize > self.max_size:
             logger.debug(
                 "Chunksize greater than maximum chunksize. "
-                "Setting to %s from %s." % (self.max_size, current_chunksize)
+                f"Setting to {self.max_size} from {current_chunksize}."
             )
             return self.max_size
         elif current_chunksize < self.min_size:
             logger.debug(
                 "Chunksize less than minimum chunksize. "
-                "Setting to %s from %s." % (self.min_size, current_chunksize)
+                f"Setting to {self.min_size} from {current_chunksize}."
             )
             return self.min_size
         else:

@@ -802,8 +802,7 @@ def _adjust_for_max_parts(self, current_chunksize, file_size):
         if chunksize != current_chunksize:
             logger.debug(
                 "Chunksize would result in the number of parts exceeding the "
-                "maximum. Setting to %s from %s."
-                % (chunksize, current_chunksize)
+                f"maximum. Setting to {chunksize} from {current_chunksize}."
             )
 
         return chunksize
2 changes: 1 addition & 1 deletion scripts/ci/install

@@ -44,4 +44,4 @@ if __name__ == "__main__":
     package = os.path.join('dist', wheel_dist)
     if args.extras:
         package = f"'{package}[{args.extras}]'"
-    run('pip install %s' % package)
+    run(f'pip install {package}')
2 changes: 1 addition & 1 deletion scripts/performance/benchmark

@@ -35,7 +35,7 @@ elif sys.platform == 'darwin':
 else:
     # TODO: Add support for windows. This would require figuring out what
     # interface to use on windows.
-    raise RuntimeError('Script cannot be run on %s' % sys.platform)
+    raise RuntimeError(f'Script cannot be run on {sys.platform}')
 
 
 def benchmark(args):
6 changes: 3 additions & 3 deletions scripts/performance/benchmark-download

@@ -63,7 +63,7 @@ def human_readable_to_bytes(value):
         try:
             return int(value)
         except ValueError:
-            raise ValueError("Invalid size value: %s" % value)
+            raise ValueError(f"Invalid size value: {value}")
     else:
         multiplier = SIZE_SUFFIX[suffix]
         return int(value[: -len(suffix)]) * multiplier

@@ -97,8 +97,8 @@ def benchmark_download(args):
         upload_file(client, temp_file, args.s3_bucket)
 
         download_file_script = (
-            './download-file --file-name %s --file-type %s --s3-bucket %s '
-            '--s3-key %s' % (temp_file, args.file_type, args.s3_bucket, s3_key)
+            f'./download-file --file-name {temp_file} --file-type {args.file_type} --s3-bucket {args.s3_bucket} '
+            f'--s3-key {s3_key}'
         )
         benchmark_args = ['./benchmark', download_file_script]
         if args.output_file:
7 changes: 3 additions & 4 deletions scripts/performance/benchmark-upload

@@ -61,7 +61,7 @@ def human_readable_to_bytes(value):
         try:
             return int(value)
         except ValueError:
-            raise ValueError("Invalid size value: %s" % value)
+            raise ValueError(f"Invalid size value: {value}")
     else:
         multiplier = SIZE_SUFFIX[suffix]
         return int(value[: -len(suffix)]) * multiplier

@@ -87,9 +87,8 @@ def benchmark_upload(args):
     create_file(source_file, args.file_size)
 
     upload_file_script = (
-        './upload-file --file-name %s --file-type %s --s3-bucket %s '
-        '--s3-key %s'
-        % (source_file, args.file_type, args.s3_bucket, TEMP_KEY)
+        f'./upload-file --file-name {source_file} --file-type {args.file_type} --s3-bucket {args.s3_bucket} '
+        f'--s3-key {TEMP_KEY}'
     )
     benchmark_args = ['./benchmark', upload_file_script]
     if args.output_file:
12 changes: 6 additions & 6 deletions scripts/performance/summarize

@@ -168,13 +168,13 @@ class Summarizer:
         table = [
             [
                 'Total Time (seconds)',
-                '%.3f' % self.total_time,
+                f'{self.total_time:.3f}',
                 self.std_dev_total_time,
             ],
             ['Maximum Memory', h(self.max_memory), h(self.std_dev_max_memory)],
             [
                 'Maximum CPU (percent)',
-                '%.1f' % self.max_cpu,
+                f'{self.max_cpu:.1f}',
                 self.std_dev_max_cpu,
             ],
             [

@@ -184,14 +184,14 @@ class Summarizer:
             ],
             [
                 'Average CPU (percent)',
-                '%.1f' % self.average_cpu,
+                f'{self.average_cpu:.1f}',
                 self.std_dev_average_cpu,
             ],
         ]
         return tabulate(
             table,
             headers=[
-                'Metric over %s run(s)' % (self.total_files),
+                f'Metric over {self.total_files} run(s)',
                 'Mean',
                 'Standard Deviation',
             ],

@@ -238,8 +238,8 @@ class Summarizer:
     def _validate_row(self, row, filename):
         if not row:
             raise RuntimeError(
-                'Row: %s could not be processed. The CSV file (%s) may be '
-                'empty.' % (row, filename)
+                f'Row: {row} could not be processed. The CSV file ({filename}) may be '
+                'empty.'
             )
 
     def process_data_row(self, row):
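
The summarize hunks above also carry printf precision specifiers across to f-string format specs: '%.3f' % value becomes f'{value:.3f}', and '%.1f' % value becomes f'{value:.1f}'. A quick sketch of the equivalence, with a hypothetical measurement:

    total_time = 12.34567  # hypothetical value for illustration

    old = '%.3f' % total_time
    new = f'{total_time:.3f}'
    assert old == new == '12.346'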
2 changes: 1 addition & 1 deletion scripts/stress/timeout

@@ -25,7 +25,7 @@ import psutil
 
 
 class TimeoutException(Exception):
     def __init__(self, timeout_len):
-        msg = 'Script failed to complete within %s seconds' % timeout_len
+        msg = f'Script failed to complete within {timeout_len} seconds'
         Exception.__init__(self, msg)
2 changes: 1 addition & 1 deletion tests/functional/test_copy.py

@@ -396,7 +396,7 @@ def add_upload_part_copy_responses_with_default_expected_params(
         if extra_expected_params:
             if 'ChecksumAlgorithm' in extra_expected_params:
                 name = extra_expected_params['ChecksumAlgorithm']
-                checksum_member = 'Checksum%s' % name.upper()
+                checksum_member = f'Checksum{name.upper()}'
                 response = upload_part_response['service_response']
                 response['CopyPartResult'][checksum_member] = 'sum%s==' % (
                     i + 1
2 changes: 1 addition & 1 deletion tests/functional/test_crt.py

@@ -77,7 +77,7 @@ def setUp(self):
             'myfile', self.expected_content, mode='wb'
         )
         self.expected_path = "/" + self.bucket + "/" + self.key
-        self.expected_host = "s3.%s.amazonaws.com" % (self.region)
+        self.expected_host = f"s3.{self.region}.amazonaws.com"
         self.expected_s3express_host = f'{self.s3express_bucket}.s3express-usw2-az5.us-west-2.amazonaws.com'
         self.expected_s3express_path = f'/{self.key}'
         self.s3_request = mock.Mock(awscrt.s3.S3Request)
4 changes: 2 additions & 2 deletions tests/functional/test_download.py

@@ -141,7 +141,7 @@ def test_download_temporary_file_does_not_exist(self):
         # Make sure the file exists
         self.assertTrue(os.path.exists(self.filename))
         # Make sure the random temporary file does not exist
-        possible_matches = glob.glob('%s*' % self.filename + os.extsep)
+        possible_matches = glob.glob(f'{self.filename}*' + os.extsep)
         self.assertEqual(possible_matches, [])
 
     def test_download_for_fileobj(self):

@@ -201,7 +201,7 @@ def test_download_cleanup_on_failure(self):
             future.result()
         # Make sure the actual file and the temporary do not exist
         # by globbing for the file and any of its extensions
-        possible_matches = glob.glob('%s*' % self.filename)
+        possible_matches = glob.glob(f'{self.filename}*')
         self.assertEqual(possible_matches, [])
 
     def test_download_with_nonexistent_directory(self):
2 changes: 1 addition & 1 deletion tests/functional/test_processpool.py

@@ -212,7 +212,7 @@ def test_cleans_up_tempfile_on_failure(self):
         )
         self.assertFalse(os.path.exists(self.filename))
         # Any tempfile should have been erased as well
-        possible_matches = glob.glob('%s*' % self.filename + os.extsep)
+        possible_matches = glob.glob(f'{self.filename}*' + os.extsep)
         self.assertEqual(possible_matches, [])
 
     def test_validates_extra_args(self):
4 changes: 2 additions & 2 deletions tests/functional/test_upload.py

@@ -92,7 +92,7 @@ def collect_body(self, params, model, **kwargs):
             data=params['Body'],
         )
         self.client.meta.events.emit(
-            'request-created.s3.%s' % model.name,
+            f'request-created.s3.{model.name}',
             request=request,
             operation_name=model.name,
         )

@@ -398,7 +398,7 @@ def add_upload_part_responses_with_default_expected_params(
             # If ChecksumAlgorithm is present stub the response checksums
             if 'ChecksumAlgorithm' in extra_expected_params:
                 name = extra_expected_params['ChecksumAlgorithm']
-                checksum_member = 'Checksum%s' % name.upper()
+                checksum_member = f'Checksum{name.upper()}'
                 response = upload_part_response['service_response']
                 response[checksum_member] = 'sum%s==' % (i + 1)
2 changes: 1 addition & 1 deletion tests/integration/test_crt.py

@@ -508,6 +508,6 @@ def test_download_cancel(self):
             future.result()
         self.assertEqual(err.name, 'AWS_ERROR_S3_CANCELED')
 
-        possible_matches = glob.glob('%s*' % download_path)
+        possible_matches = glob.glob(f'{download_path}*')
         self.assertEqual(possible_matches, [])
         self._assert_subscribers_called()
8 changes: 4 additions & 4 deletions tests/integration/test_download.py

@@ -98,7 +98,7 @@ def test_large_download_exits_quicky_on_exception(self):
                 future.cancel()
                 raise RuntimeError(
                     "Download transfer did not start after waiting for "
-                    "%s seconds." % timeout
+                    f"{timeout} seconds."
                 )
         # Raise an exception which should cause the preceding
         # download to cancel and exit quickly

@@ -124,7 +124,7 @@ def test_large_download_exits_quicky_on_exception(self):
 
         # Make sure the actual file and the temporary do not exist
         # by globbing for the file and any of its extensions
-        possible_matches = glob.glob('%s*' % download_path)
+        possible_matches = glob.glob(f'{download_path}*')
         self.assertEqual(possible_matches, [])
 
     @skip_if_using_serial_implementation(

@@ -182,7 +182,7 @@ def test_many_files_exits_quicky_on_exception(self):
 
         # For the transfer that did get cancelled, make sure the temporary
         # file got removed.
-        possible_matches = glob.glob('%s*' % future.meta.call_args.fileobj)
+        possible_matches = glob.glob(f'{future.meta.call_args.fileobj}*')
         self.assertEqual(possible_matches, [])
 
     def test_progress_subscribers_on_download(self):

@@ -280,5 +280,5 @@ def test_download_to_special_file(self):
         except Exception as e:
             self.fail(
                 'Should have been able to download to /dev/null but received '
-                'following exception %s' % e
+                f'following exception {e}'
             )
(Diffs for the remaining changed files are not shown here.)