AnsibleAWSModule related cleanup - s3_bucket and redshift
tremble committed Jan 25, 2020
1 parent f1ec484 · commit f42d3a9
Showing 2 changed files with 20 additions and 23 deletions.
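The change in both files is the same cleanup: AnsibleAWSModule already merges the common AWS connection options (region, profile, credentials, and so on) into the argument spec when the module object is created, so the explicit ec2_argument_spec() / update(dict(...)) boilerplate is redundant. A minimal before/after sketch of the pattern, using a made-up example_option rather than code from either module:

# Before: the module builds and extends the shared AWS argument spec itself.
from ansible.module_utils.ec2 import ec2_argument_spec
from ansible.module_utils.aws.core import AnsibleAWSModule

argument_spec = ec2_argument_spec()
argument_spec.update(dict(
    example_option=dict(type='str', required=True),  # hypothetical module option
))

# After: a plain dict is enough; AnsibleAWSModule injects the common AWS
# arguments itself when it is instantiated.
argument_spec = dict(
    example_option=dict(type='str', required=True),
)
module = AnsibleAWSModule(argument_spec=argument_spec)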
lib/ansible/modules/cloud/amazon/redshift.py (10 changes: 5 additions & 5 deletions)
@@ -261,8 +261,9 @@
 try:
     import botocore
 except ImportError:
-    pass  # handled by AnsibleAWSModule
-from ansible.module_utils.ec2 import ec2_argument_spec, snake_dict_to_camel_dict
+    pass  # caught by AnsibleAWSModule
+
+from ansible.module_utils.ec2 import snake_dict_to_camel_dict
 from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code

@@ -505,8 +506,7 @@ def modify_cluster(module, redshift):


 def main():
-    argument_spec = ec2_argument_spec()
-    argument_spec.update(dict(
+    argument_spec = dict(
         command=dict(choices=['create', 'facts', 'delete', 'modify'], required=True),
         identifier=dict(required=True),
         node_type=dict(choices=['ds1.xlarge', 'ds1.8xlarge', 'ds2.xlarge',
@@ -538,7 +538,7 @@ def main():
         enhanced_vpc_routing=dict(type='bool', default=False),
         wait=dict(type='bool', default=False),
         wait_timeout=dict(type='int', default=300),
-    ))
+    )

     required_if = [
         ('command', 'delete', ['skip_final_cluster_snapshot']),
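For context, a sketch (not a verbatim excerpt of redshift.py) of how the trimmed spec is consumed: the plain dict goes straight to AnsibleAWSModule together with required_if, and the resulting module object can hand back a configured boto3 client on its own:

module = AnsibleAWSModule(
    argument_spec=argument_spec,
    required_if=required_if,
)

# AnsibleAWSModule builds the client from the common AWS options it merged
# into the argument spec above.
redshift = module.client('redshift')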
lib/ansible/modules/cloud/amazon/s3_bucket.py (33 changes: 15 additions & 18 deletions)
@@ -171,13 +171,13 @@
 from ansible.module_utils.six import string_types
 from ansible.module_utils.basic import to_text
 from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code
-from ansible.module_utils.ec2 import compare_policies, ec2_argument_spec, boto3_tag_list_to_ansible_dict, ansible_dict_to_boto3_tag_list
+from ansible.module_utils.ec2 import compare_policies, boto3_tag_list_to_ansible_dict, ansible_dict_to_boto3_tag_list
 from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn, AWSRetry

 try:
     from botocore.exceptions import BotoCoreError, ClientError, EndpointConnectionError, WaiterError
 except ImportError:
-    pass  # handled by AnsibleAWSModule
+    pass  # caught by AnsibleAWSModule


 def create_or_update_bucket(s3_client, module, location):
@@ -668,22 +668,19 @@ def get_s3_client(module, aws_connect_kwargs, location, ceph, s3_url):

 def main():

-    argument_spec = ec2_argument_spec()
-    argument_spec.update(
-        dict(
-            force=dict(default=False, type='bool'),
-            policy=dict(type='json'),
-            name=dict(required=True),
-            requester_pays=dict(default=False, type='bool'),
-            s3_url=dict(aliases=['S3_URL']),
-            state=dict(default='present', choices=['present', 'absent']),
-            tags=dict(type='dict'),
-            purge_tags=dict(type='bool', default=True),
-            versioning=dict(type='bool'),
-            ceph=dict(default=False, type='bool'),
-            encryption=dict(choices=['none', 'AES256', 'aws:kms']),
-            encryption_key_id=dict()
-        )
+    argument_spec = dict(
+        force=dict(default=False, type='bool'),
+        policy=dict(type='json'),
+        name=dict(required=True),
+        requester_pays=dict(default=False, type='bool'),
+        s3_url=dict(aliases=['S3_URL']),
+        state=dict(default='present', choices=['present', 'absent']),
+        tags=dict(type='dict'),
+        purge_tags=dict(type='bool', default=True),
+        versioning=dict(type='bool'),
+        ceph=dict(default=False, type='bool'),
+        encryption=dict(choices=['none', 'AES256', 'aws:kms']),
+        encryption_key_id=dict()
     )

     module = AnsibleAWSModule(
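Note that get_aws_connection_info and boto3_conn stay imported in s3_bucket.py: the module supports non-AWS endpoints (the ceph and s3_url options), which call for a manually constructed client instead of module.client('s3'). A rough sketch of that path, assuming the usual helper signatures and with s3_url holding the module's s3_url parameter:

region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)

# Point the client at the custom endpoint (e.g. a Ceph RGW) when one is given,
# otherwise fall back to the regular endpoint for the region.
s3_client = boto3_conn(module, conn_type='client', resource='s3',
                       region=region, endpoint=s3_url or ec2_url,
                       **aws_connect_kwargs)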
