
Commit

fix s3 redirect (#827)
thehesiod committed Sep 1, 2020
1 parent bddbeee commit fedb9d2
Showing 5 changed files with 137 additions and 5 deletions.
4 changes: 4 additions & 0 deletions CHANGES.rst
@@ -1,5 +1,9 @@
 Changes
 -------
+1.1.1 (2020-08-31)
+^^^^^^^^^^^^^^^^^^
+* fix s3 region redirect bug #825
+
 1.1.0 (2020-08-18)
 ^^^^^^^^^^^^^^^^^^
 * bump botocore to 1.17.44
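The entry above covers the wrong-region handling restored in this commit: an S3 call against a bucket that lives in a different region than the client is configured for is now redirected and retried instead of surfacing a 301/400 or AuthorizationHeaderMalformed error. A minimal sketch of that scenario through the public get_session()/create_client() API; the bucket, key, and regions are hypothetical placeholders:

import asyncio

from aiobotocore import get_session  # re-exported in aiobotocore/__init__.py below


async def main():
    session = get_session()
    # Client pinned to us-east-1 while the (hypothetical) bucket lives in
    # eu-west-1; with AioS3RegionRedirector registered, the wrong-region error
    # is detected and the request is re-signed against the bucket's real
    # endpoint instead of failing.
    async with session.create_client('s3', region_name='us-east-1') as client:
        resp = await client.head_object(Bucket='my-eu-west-1-bucket',
                                        Key='some-key')
        print(resp['ResponseMetadata']['HTTPStatusCode'])


asyncio.run(main())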
2 changes: 1 addition & 1 deletion aiobotocore/__init__.py
@@ -1,4 +1,4 @@
 from .session import get_session, AioSession
 
 __all__ = ['get_session', 'AioSession']
-__version__ = '1.1.0'
+__version__ = '1.1.1'
19 changes: 18 additions & 1 deletion aiobotocore/client.py
@@ -1,6 +1,6 @@
 from botocore.awsrequest import prepare_request_dict
 from botocore.client import logger, PaginatorDocstring, ClientCreator, \
-    BaseClient, ClientEndpointBridge
+    BaseClient, ClientEndpointBridge, S3ArnParamHandler, S3EndpointSetter
 from botocore.exceptions import OperationNotPageableError
 from botocore.history import get_global_history_recorder
 from botocore.utils import get_service_module_name
@@ -9,6 +9,7 @@
 
 from .paginate import AioPaginator
 from .args import AioClientArgsCreator
+from .utils import AioS3RegionRedirector
 from . import waiter
 
 history_recorder = get_global_history_recorder()
@@ -55,6 +56,22 @@ async def _create_client_class(self, service_name, service_model):
         cls = type(str(class_name), tuple(bases), class_attributes)
         return cls
 
+    def _register_s3_events(self, client, endpoint_bridge, endpoint_url,
+                            client_config, scoped_config):
+        if client.meta.service_model.service_name != 's3':
+            return
+        AioS3RegionRedirector(endpoint_bridge, client).register()
+        S3ArnParamHandler().register(client.meta.events)
+        S3EndpointSetter(
+            endpoint_resolver=self._endpoint_resolver,
+            region=client.meta.region_name,
+            s3_config=client.meta.config.s3,
+            endpoint_url=endpoint_url,
+            partition=client.meta.partition
+        ).register(client.meta.events)
+        self._set_s3_presign_signature_version(
+            client.meta, client_config, scoped_config)
+
     def _get_client_args(self, service_model, region_name, is_secure,
                          endpoint_url, verify, credentials,
                          scoped_config, client_config, endpoint_bridge):
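The override above keeps botocore's registration flow but swaps in the awaitable AioS3RegionRedirector (plus the ARN and endpoint handlers) for clients whose service name is 's3'. A rough sketch of the key step, under the assumption that register(), inherited from botocore's S3RegionRedirector, subscribes redirect_from_error to the client's retry events; wire_up_redirector is a hypothetical helper, not part of the library:

from aiobotocore.utils import AioS3RegionRedirector


def wire_up_redirector(client, endpoint_bridge):
    # Illustrative sketch of what ClientCreator._register_s3_events does for an
    # 's3' client: attach the async redirector so a wrong-region response can
    # trigger a re-signed retry against the bucket's actual endpoint.
    if client.meta.service_model.service_name != 's3':
        return
    AioS3RegionRedirector(endpoint_bridge, client).register()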
107 changes: 106 additions & 1 deletion aiobotocore/utils.py
@@ -5,7 +5,8 @@
 import aiohttp
 import aiohttp.client_exceptions
 from botocore.utils import ContainerMetadataFetcher, InstanceMetadataFetcher, \
-    IMDSFetcher, get_environ_proxies, BadIMDSRequestError
+    IMDSFetcher, get_environ_proxies, BadIMDSRequestError, S3RegionRedirector, \
+    ClientError
 from botocore.exceptions import MetadataRetrievalError
 import botocore.awsrequest
 
@@ -132,6 +133,110 @@ async def _get_credentials(self, role_name, token=None):
         return json.loads(r.text)
 
 
+class AioS3RegionRedirector(S3RegionRedirector):
+    async def redirect_from_error(self, request_dict, response, operation, **kwargs):
+        if response is None:
+            # This could be none if there was a ConnectionError or other
+            # transport error.
+            return
+
+        if self._is_s3_accesspoint(request_dict.get('context', {})):
+            logger.debug(
+                'S3 request was previously to an accesspoint, not redirecting.'
+            )
+            return
+
+        if request_dict.get('context', {}).get('s3_redirected'):
+            logger.debug(
+                'S3 request was previously redirected, not redirecting.')
+            return
+
+        error = response[1].get('Error', {})
+        error_code = error.get('Code')
+        response_metadata = response[1].get('ResponseMetadata', {})
+
+        # We have to account for 400 responses because
+        # if we sign a Head* request with the wrong region,
+        # we'll get a 400 Bad Request but we won't get a
+        # body saying it's an "AuthorizationHeaderMalformed".
+        is_special_head_object = (
+            error_code in ['301', '400'] and
+            operation.name == 'HeadObject'
+        )
+        is_special_head_bucket = (
+            error_code in ['301', '400'] and
+            operation.name == 'HeadBucket' and
+            'x-amz-bucket-region' in response_metadata.get('HTTPHeaders', {})
+        )
+        is_wrong_signing_region = (
+            error_code == 'AuthorizationHeaderMalformed' and
+            'Region' in error
+        )
+        is_redirect_status = response[0] is not None and \
+            response[0].status_code in [301, 302, 307]
+        is_permanent_redirect = error_code == 'PermanentRedirect'
+        if not any([is_special_head_object, is_wrong_signing_region,
+                    is_permanent_redirect, is_special_head_bucket,
+                    is_redirect_status]):
+            return
+
+        bucket = request_dict['context']['signing']['bucket']
+        client_region = request_dict['context'].get('client_region')
+        new_region = await self.get_bucket_region(bucket, response)
+
+        if new_region is None:
+            logger.debug(
+                "S3 client configured for region %s but the bucket %s is not "
+                "in that region and the proper region could not be "
+                "automatically determined." % (client_region, bucket))
+            return
+
+        logger.debug(
+            "S3 client configured for region %s but the bucket %s is in region"
+            " %s; Please configure the proper region to avoid multiple "
+            "unnecessary redirects and signing attempts." % (
+                client_region, bucket, new_region))
+        endpoint = self._endpoint_resolver.resolve('s3', new_region)
+        endpoint = endpoint['endpoint_url']
+
+        signing_context = {
+            'region': new_region,
+            'bucket': bucket,
+            'endpoint': endpoint
+        }
+        request_dict['context']['signing'] = signing_context
+
+        self._cache[bucket] = signing_context
+        self.set_request_url(request_dict, request_dict['context'])
+
+        request_dict['context']['s3_redirected'] = True
+
+        # Return 0 so it doesn't wait to retry
+        return 0
+
+    async def get_bucket_region(self, bucket, response):
+        # First try to source the region from the headers.
+        service_response = response[1]
+        response_headers = service_response['ResponseMetadata']['HTTPHeaders']
+        if 'x-amz-bucket-region' in response_headers:
+            return response_headers['x-amz-bucket-region']
+
+        # Next, check the error body
+        region = service_response.get('Error', {}).get('Region', None)
+        if region is not None:
+            return region
+
+        # Finally, HEAD the bucket. No other choice sadly.
+        try:
+            response = await self._client.head_bucket(Bucket=bucket)
+            headers = response['ResponseMetadata']['HTTPHeaders']
+        except ClientError as e:
+            headers = e.response['ResponseMetadata']['HTTPHeaders']
+
+        region = headers.get('x-amz-bucket-region', None)
+        return region
+
+
 class AioContainerMetadataFetcher(ContainerMetadataFetcher):
     def __init__(self, session=None, sleep=asyncio.sleep):
         if session is None:
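The fallback at the end of get_bucket_region (HEAD the bucket and read the x-amz-bucket-region header, which S3 returns even when the HEAD itself is rejected) also works as a standalone recipe if you ever need to resolve a bucket's region yourself. A small sketch mirroring the code above; find_bucket_region is a hypothetical helper and client is any aiobotocore S3 client:

from botocore.exceptions import ClientError


async def find_bucket_region(client, bucket):
    # Same trick as AioS3RegionRedirector.get_bucket_region: the region header
    # is present on both successful and error responses, so read it from the
    # exception's response metadata when head_bucket raises.
    try:
        resp = await client.head_bucket(Bucket=bucket)
        headers = resp['ResponseMetadata']['HTTPHeaders']
    except ClientError as e:
        headers = e.response['ResponseMetadata']['HTTPHeaders']
    return headers.get('x-amz-bucket-region')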
10 changes: 8 additions & 2 deletions tests/test_patches.py
@@ -28,7 +28,7 @@
     generate_presigned_post, generate_db_auth_token, add_generate_db_auth_token
 from botocore.hooks import EventAliaser, HierarchicalEmitter
 from botocore.utils import ContainerMetadataFetcher, IMDSFetcher, \
-    InstanceMetadataFetcher
+    InstanceMetadataFetcher, S3RegionRedirector
 from botocore.credentials import Credentials, RefreshableCredentials, \
     CachedCredentialFetcher, AssumeRoleCredentialFetcher, EnvProvider, \
     ContainerProvider, InstanceMetadataProvider, ProfileProviderBuilder, \
@@ -79,6 +79,7 @@
     ClientCreator.create_client: {'ee63a3d60b5917879cb644c1b0aa3fe34538b915'},
     ClientCreator._create_client_class: {'5e493d069eedbf314e40e12a7886bbdbcf194335'},
     ClientCreator._get_client_args: {'555e1e41f93df7558c8305a60466681e3a267ef3'},
+    ClientCreator._register_s3_events: {'da3fc62a131d63964c8daa0f52124b092fd8f1b4'},
 
     BaseClient._make_api_call: {'0c59329d4c8a55b88250b512b5e69239c42246fb'},
     BaseClient._make_request: {'033a386f7d1025522bea7f2bbca85edc5c8aafd2'},
@@ -273,9 +274,14 @@
 
     InstanceMetadataFetcher.retrieve_iam_role_credentials:
         {'76737f6add82a1b9a0dc590cf10bfac0c7026a2e'},
-    InstanceMetadataFetcher._get_iam_role: {'80073d7adc9fb604bc6235af87241f5efc296ad7'},
+    InstanceMetadataFetcher._get_iam_role:
+        {'80073d7adc9fb604bc6235af87241f5efc296ad7'},
     InstanceMetadataFetcher._get_credentials:
         {'1a64f59a3ca70b83700bd14deeac25af14100d58'},
+    S3RegionRedirector.redirect_from_error:
+        {'f6f765431145a9bed8e73e6a3dbc7b0d6ae5f738'},
+    S3RegionRedirector.get_bucket_region:
+        {'b5bbc8b010576668dc2812d657c4b48af79e8f99'},
 
     # waiter.py
     NormalizedOperationMethod.__call__: {'79723632d023739aa19c8a899bc2b814b8ab12ff'},
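test_patches.py pins the botocore symbols that aiobotocore overrides to a source digest, so an upstream change fails the suite and prompts a review of the corresponding async port; the new entries cover ClientCreator._register_s3_events and the two S3RegionRedirector coroutines replaced in this commit. A minimal sketch of the idea, assuming the pins are SHA-1 hashes of the upstream source (the 40-character hex digests above are consistent with that, but the exact hashing scheme is an assumption here):

import hashlib
import inspect

from botocore.utils import S3RegionRedirector


def source_digest(obj):
    # Hash the exact source text of the wrapped botocore callable; if botocore
    # changes it upstream, the pinned digest goes stale and the test reminds
    # maintainers to re-check the aiobotocore override.
    return hashlib.sha1(inspect.getsource(obj).encode('utf-8')).hexdigest()


# Digest pinned in this commit; with a newer botocore the comparison is
# expected to fail until the pin (and, if needed, the async port) is refreshed.
print(source_digest(S3RegionRedirector.redirect_from_error) ==
      'f6f765431145a9bed8e73e6a3dbc7b0d6ae5f738')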
