[WIP] Add S3ErrorHandler #2077

Draft
wants to merge 1 commit into base: main
3 changes: 3 additions & 0 deletions changelogs/fragments/2024-05-07-s3-error-handler.yml
@@ -0,0 +1,3 @@
---
minor_changes:
- module_utils.s3 - added ``S3ErrorHandler`` and related ``AnsibleAWSError`` subclasses (https://github.com/ansible-collections/amazon.aws/pull/2077).
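
For orientation, a minimal sketch of how a module-side helper might consume the new handler and exceptions. The decorator and exception names come from this PR; the get_bucket_tags helper, the example_module_code function, and the e.exception/e.message attributes handed to fail_json_aws() follow the collection's usual conventions and are illustrative assumptions rather than code added by this change:

from ansible_collections.amazon.aws.plugins.module_utils.s3 import AnsibleS3Error, S3ErrorHandler


@S3ErrorHandler.common_error_handler("get bucket tagging")
def get_bucket_tags(client, bucket):
    # Illustrative helper: boto3/botocore errors raised here are re-raised by the
    # stacked decorators as AnsibleS3Error (or one of its subclasses).
    return client.get_bucket_tagging(Bucket=bucket)["TagSet"]


def example_module_code(module, client, bucket):
    try:
        return get_bucket_tags(client, bucket)
    except AnsibleS3Error as e:
        # Assumed module-side pattern: hand the original exception and the wrapped
        # description back to AnsibleAWSModule.fail_json_aws().
        module.fail_json_aws(e.exception, msg=e.message)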
79 changes: 79 additions & 0 deletions plugins/module_utils/s3.py
@@ -3,6 +3,7 @@
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

import functools
import string
from urllib.parse import urlparse

@@ -14,13 +15,91 @@
HAS_MD5 = False

try:
    # Beware: S3 is a "special" case; it sometimes catches botocore exceptions and
    # re-raises them as boto3 exceptions.
import boto3
import botocore
except ImportError:
pass # Handled by the calling module


from ansible.module_utils.basic import to_text

from .botocore import is_boto3_error_code
from .errors import AWSErrorHandler
from .exceptions import AnsibleAWSError

IGNORE_S3_DROP_IN_EXCEPTIONS = ["XNotImplemented", "NotImplemented", "AccessControlListNotSupported"]


class AnsibleS3Error(AnsibleAWSError):
pass


class Sigv4Required(AnsibleS3Error):
pass


class AnsibleS3PermissionsError(AnsibleS3Error):
pass


class AnsibleS3SupportError(AnsibleS3Error):
pass


class S3ErrorHandler(AWSErrorHandler):
_CUSTOM_EXCEPTION = AnsibleS3Error

@classmethod
def _is_missing(cls):
return is_boto3_error_code(
[
"404",
"NoSuchTagSet",
"NoSuchTagSetError",
"ObjectLockConfigurationNotFoundError",
"NoSuchBucketPolicy",
"ServerSideEncryptionConfigurationNotFoundError",
"NoSuchBucket",
"NoSuchPublicAccessBlockConfiguration",
"OwnershipControlsNotFoundError",
"NoSuchOwnershipControls",
]
)

@classmethod
def common_error_handler(cls, description):
def wrapper(func):
@super(S3ErrorHandler, cls).common_error_handler(description)
@functools.wraps(func)
def handler(*args, **kwargs):
try:
return func(*args, **kwargs)
except is_boto3_error_code(["403", "AccessDenied"]) as e:
                    # FUTURE: there's a case to be made for moving this up into AWSErrorHandler.
                    # For now, handle it just for S3 and see whether it pops up in too many other places.
raise AnsibleS3PermissionsError(
message=f"Failed to {description} (permission denied)", exception=e
) from e
except is_boto3_error_code(IGNORE_S3_DROP_IN_EXCEPTIONS) as e: # pylint: disable=duplicate-except
# Unlike most of our modules, we attempt to handle non-AWS clouds. For read-only
# actions we sometimes need the ability to ignore unsupported features.
raise AnsibleS3SupportError(
message=f"Failed to {description} (not supported by cloud)", exception=e
) from e
except botocore.exceptions.EndpointConnectionError as e:
raise cls._CUSTOM_EXCEPTION(
message=f"Failed to {description} - Invalid endpoint provided", exception=e
) from e
except boto3.exceptions.Boto3Error as e:
raise cls._CUSTOM_EXCEPTION(message=f"Failed to {description}", exception=e) from e

return handler

return wrapper


def s3_head_objects(client, parts, bucket, obj, versionId):
args = {"Bucket": bucket, "Key": obj}
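Because the handler maps drop-in-provider failures (IGNORE_S3_DROP_IN_EXCEPTIONS) and 403/AccessDenied to their own subclasses, callers can react to them selectively. A sketch of that idea follows, assuming a hypothetical read-only helper wrapped with the new decorator; treating unsupported ownership controls as "absent" is an illustrative choice, not something this PR prescribes:

from ansible_collections.amazon.aws.plugins.module_utils.s3 import (
    AnsibleS3PermissionsError,
    AnsibleS3SupportError,
    S3ErrorHandler,
)


@S3ErrorHandler.common_error_handler("get bucket ownership controls")
def get_bucket_ownership(client, bucket):
    # Hypothetical helper for the sake of the example.
    return client.get_bucket_ownership_controls(Bucket=bucket)["OwnershipControls"]


def describe_bucket_ownership(client, bucket):
    try:
        return get_bucket_ownership(client, bucket)
    except AnsibleS3SupportError:
        # Ceph/FakeS3-style drop-ins may not implement this API; for a read-only
        # code path it can be reasonable to report the feature as absent.
        return None
    except AnsibleS3PermissionsError:
        # Permission problems stay distinguishable from "not supported" and can be
        # re-raised (or handled) by the caller as appropriate.
        raise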
91 changes: 91 additions & 0 deletions tests/unit/module_utils/s3/test_endpoints.py
@@ -0,0 +1,91 @@
#
# (c) 2021 Red Hat Inc.
#
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from unittest.mock import patch

import pytest

from ansible_collections.amazon.aws.plugins.module_utils import s3

mod_urlparse = "ansible_collections.amazon.aws.plugins.module_utils.s3.urlparse"


class UrlInfo:
def __init__(self, scheme=None, hostname=None, port=None):
self.hostname = hostname
self.scheme = scheme
self.port = port


@patch(mod_urlparse)
def test_is_fakes3_with_none_arg(m_urlparse):
m_urlparse.side_effect = SystemExit(1)
result = s3.is_fakes3(None)
assert not result
m_urlparse.assert_not_called()


@pytest.mark.parametrize(
"url,scheme,result",
[
("https://test-s3.amazon.com", "https", False),
("fakes3://test-s3.amazon.com", "fakes3", True),
("fakes3s://test-s3.amazon.com", "fakes3s", True),
],
)
@patch(mod_urlparse)
def test_is_fakes3(m_urlparse, url, scheme, result):
m_urlparse.return_value = UrlInfo(scheme=scheme)
assert result == s3.is_fakes3(url)
m_urlparse.assert_called_with(url)


@pytest.mark.parametrize(
"url,urlinfo,endpoint",
[
(
"fakes3://test-s3.amazon.com",
{"scheme": "fakes3", "hostname": "test-s3.amazon.com"},
{"endpoint": "http://test-s3.amazon.com:80", "use_ssl": False},
),
(
"fakes3://test-s3.amazon.com:8080",
{"scheme": "fakes3", "hostname": "test-s3.amazon.com", "port": 8080},
{"endpoint": "http://test-s3.amazon.com:8080", "use_ssl": False},
),
(
"fakes3s://test-s3.amazon.com",
{"scheme": "fakes3s", "hostname": "test-s3.amazon.com"},
{"endpoint": "https://test-s3.amazon.com:443", "use_ssl": True},
),
(
"fakes3s://test-s3.amazon.com:9096",
{"scheme": "fakes3s", "hostname": "test-s3.amazon.com", "port": 9096},
{"endpoint": "https://test-s3.amazon.com:9096", "use_ssl": True},
),
],
)
@patch(mod_urlparse)
def test_parse_fakes3_endpoint(m_urlparse, url, urlinfo, endpoint):
m_urlparse.return_value = UrlInfo(**urlinfo)
result = s3.parse_fakes3_endpoint(url)
assert endpoint == result
m_urlparse.assert_called_with(url)


@pytest.mark.parametrize(
"url,scheme,use_ssl",
[
("https://test-s3-ceph.amazon.com", "https", True),
("http://test-s3-ceph.amazon.com", "http", False),
],
)
@patch(mod_urlparse)
def test_parse_ceph_endpoint(m_urlparse, url, scheme, use_ssl):
m_urlparse.return_value = UrlInfo(scheme=scheme)
result = s3.parse_ceph_endpoint(url)
assert result == {"endpoint": url, "use_ssl": use_ssl}
m_urlparse.assert_called_with(url)
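
The endpoint dictionaries asserted above also suggest how a parsed result could be handed to boto3 when talking to a FakeS3-style service. The client construction below is only a sketch of that consumption, not code from this collection, and the dummy credentials are an assumption about the target service:

import boto3

from ansible_collections.amazon.aws.plugins.module_utils.s3 import parse_fakes3_endpoint

params = parse_fakes3_endpoint("fakes3://test-s3.amazon.com:8080")
# params == {"endpoint": "http://test-s3.amazon.com:8080", "use_ssl": False}
client = boto3.client(
    "s3",
    endpoint_url=params["endpoint"],
    use_ssl=params["use_ssl"],
    aws_access_key_id="fake",
    aws_secret_access_key="fake",
)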
@@ -188,108 +188,3 @@ def test_calculate_etag_failure(m_checksum_file, m_checksum_content, using_file)
with pytest.raises(SystemExit):
test_method(module, content, etag, client, s3bucket_name, s3bucket_object, version)
module.fail_json_aws.assert_called()


@pytest.mark.parametrize(
"bucket_name,result",
[
("docexamplebucket1", None),
("log-delivery-march-2020", None),
("my-hosted-content", None),
("docexamplewebsite.com", None),
("www.docexamplewebsite.com", None),
("my.example.s3.bucket", None),
("doc", None),
("doc_example_bucket", "invalid character(s) found in the bucket name"),
("DocExampleBucket", "invalid character(s) found in the bucket name"),
("doc-example-bucket-", "bucket names must begin and end with a letter or number"),
(
"this.string.has.more.than.63.characters.so.it.should.not.passed.the.validated",
"the length of an S3 bucket cannot exceed 63 characters",
),
("my", "the length of an S3 bucket must be at least 3 characters"),
],
)
def test_validate_bucket_name(bucket_name, result):
assert result == s3.validate_bucket_name(bucket_name)


mod_urlparse = "ansible_collections.amazon.aws.plugins.module_utils.s3.urlparse"


class UrlInfo:
def __init__(self, scheme=None, hostname=None, port=None):
self.hostname = hostname
self.scheme = scheme
self.port = port


@patch(mod_urlparse)
def test_is_fakes3_with_none_arg(m_urlparse):
m_urlparse.side_effect = SystemExit(1)
result = s3.is_fakes3(None)
assert not result
m_urlparse.assert_not_called()


@pytest.mark.parametrize(
"url,scheme,result",
[
("https://test-s3.amazon.com", "https", False),
("fakes3://test-s3.amazon.com", "fakes3", True),
("fakes3s://test-s3.amazon.com", "fakes3s", True),
],
)
@patch(mod_urlparse)
def test_is_fakes3(m_urlparse, url, scheme, result):
m_urlparse.return_value = UrlInfo(scheme=scheme)
assert result == s3.is_fakes3(url)
m_urlparse.assert_called_with(url)


@pytest.mark.parametrize(
"url,urlinfo,endpoint",
[
(
"fakes3://test-s3.amazon.com",
{"scheme": "fakes3", "hostname": "test-s3.amazon.com"},
{"endpoint": "http://test-s3.amazon.com:80", "use_ssl": False},
),
(
"fakes3://test-s3.amazon.com:8080",
{"scheme": "fakes3", "hostname": "test-s3.amazon.com", "port": 8080},
{"endpoint": "http://test-s3.amazon.com:8080", "use_ssl": False},
),
(
"fakes3s://test-s3.amazon.com",
{"scheme": "fakes3s", "hostname": "test-s3.amazon.com"},
{"endpoint": "https://test-s3.amazon.com:443", "use_ssl": True},
),
(
"fakes3s://test-s3.amazon.com:9096",
{"scheme": "fakes3s", "hostname": "test-s3.amazon.com", "port": 9096},
{"endpoint": "https://test-s3.amazon.com:9096", "use_ssl": True},
),
],
)
@patch(mod_urlparse)
def test_parse_fakes3_endpoint(m_urlparse, url, urlinfo, endpoint):
m_urlparse.return_value = UrlInfo(**urlinfo)
result = s3.parse_fakes3_endpoint(url)
assert endpoint == result
m_urlparse.assert_called_with(url)


@pytest.mark.parametrize(
"url,scheme,use_ssl",
[
("https://test-s3-ceph.amazon.com", "https", True),
("http://test-s3-ceph.amazon.com", "http", False),
],
)
@patch(mod_urlparse)
def test_parse_ceph_endpoint(m_urlparse, url, scheme, use_ssl):
m_urlparse.return_value = UrlInfo(scheme=scheme)
result = s3.parse_ceph_endpoint(url)
assert result == {"endpoint": url, "use_ssl": use_ssl}
m_urlparse.assert_called_with(url)