
Merge branch 'release-v2.13.1'

* release-v2.13.1: (25 commits)
  Bumping version to v2.13.1
  Bumped Cloudsearch to SigV4.
  Fixes #1713 - Corrected a DDB bug where scanning over large set with a filter may not return all results.
  .
  .
  More notes.
  Fixes #1723 - A prior commit broke ``run_instances`` with block device mappings on EC2. This makes both EC2 & Autoscaling work with block device mappings.
  Test exposes AutoScaling ``launch_configuration`` bug when using block device mappings.
  Revert "Fixes #1709 - Cannot create Launch Configuration with Block Device Mappings"
  Fixes #1722 - Missed a place where ``dry_run`` is supposed to be supplied.
  More notes.
  Add unit tests for Image/get_all_images
  Add "billingProducts" support on Images
  Added notes for the recent commits.
  Added docs for apps that build on top of Boto.
  Adding unit tests for boto.swf.layer2 types.
  New unit test for boto.swf.layer2.ActivityWorker
  New unit test for boto.swf.layer2.Decider
  Adding unit tests for boto.swf.layer2.Domain
  Fixed EC2 test failures due to the method signature changes from dry-run.
  ...
commit d7fa338a2fea84d1479fb916da3c579e3fbc1c61 (2 parents: 7b70eaf + dd1a502)
@toastdriven authored
Showing with 689 additions and 45 deletions.
  1. +2 −2 README.rst
  2. +1 −1  bin/s3put
  3. +1 −1  boto/__init__.py
  4. +1 −1  boto/cloudsearch/layer1.py
  5. +7 −1 boto/connection.py
  6. +13 −9 boto/dynamodb2/results.py
  7. +1 −1  boto/ec2/autoscale/__init__.py
  8. +10 −2 boto/ec2/blockdevicemapping.py
  9. +6 −5 boto/ec2/connection.py
  10. +12 −0 boto/ec2/image.py
  11. +2 −1  boto/ec2/securitygroup.py
  12. +3 −2 boto/gs/key.py
  13. +4 −0 boto/provider.py
  14. +13 −5 boto/s3/key.py
  15. +44 −0 docs/source/apps_built_on_boto.rst
  16. +2 −0  docs/source/index.rst
  17. +31 −0 docs/source/releasenotes/dev.rst
  18. +2 −2 tests/integration/gs/test_resumable_uploads.py
  19. +54 −0 tests/unit/dynamodb2/test_table.py
  20. +13 −3 tests/unit/ec2/test_address.py
  21. +54 −0 tests/unit/ec2/test_blockdevicemapping.py
  22. +132 −0 tests/unit/ec2/test_connection.py
  23. +22 −0 tests/unit/ec2/test_securitygroup.py
  24. +28 −9 tests/unit/ec2/test_volume.py
  25. 0  tests/unit/swf/__init__.py
  26. +73 −0 tests/unit/swf/test_layer2_actors.py
  27. +112 −0 tests/unit/swf/test_layer2_domain.py
  28. +46 −0 tests/unit/swf/test_layer2_types.py
4 README.rst
@@ -1,9 +1,9 @@
####
boto
####
-boto 2.13.0
+boto 2.13.1
-Released: 12-September-2013
+Released: 16-September-2013
.. image:: https://travis-ci.org/boto/boto.png?branch=develop
:target: https://travis-ci.org/boto/boto
2  bin/s3put
@@ -290,7 +290,7 @@ def main():
if o in ('-r', '--reduced'):
reduced = True
if o in ('--header'):
- (k, v) = a.split("=")
+ (k, v) = a.split("=", 1)
headers[k] = v
if o in ('--host'):
host = a
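
A minimal illustration (not part of the diff) of why the maxsplit argument matters here: ``--header`` values such as ``Cache-Control=max-age=300`` contain their own ``=``, so an unbounded split breaks the two-element unpacking.

    # Hypothetical --header argument; the value itself contains '='.
    a = 'Cache-Control=max-age=300'

    # Old code: a.split("=") returns ['Cache-Control', 'max-age', '300'],
    # so unpacking into (k, v) raises ValueError.
    # New code: split only on the first '='.
    (k, v) = a.split("=", 1)
    assert (k, v) == ('Cache-Control', 'max-age=300')
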
2  boto/__init__.py
@@ -36,7 +36,7 @@
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.13.0'
+__version__ = '2.13.1'
Version = __version__ # for backware compatibility
UserAgent = 'Boto/%s Python/%s %s/%s' % (
2  boto/cloudsearch/layer1.py
@@ -69,7 +69,7 @@ def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
validate_certs=validate_certs)
def _required_auth_capability(self):
- return ['sign-v2']
+ return ['hmac-v4']
def get_response(self, doc_path, action, params, path='/',
parent=None, verb='GET', list_marker=None):
8 boto/connection.py
@@ -517,6 +517,7 @@ def __init__(self, host, aws_access_key_id=None,
self.port = port
else:
self.port = PORTS_BY_SECURITY[is_secure]
+ self.host_header = None
# Timeout used to tell httplib how long to wait for socket timeouts.
# Default is to leave timeout unchanged, which will in turn result in
@@ -541,11 +542,13 @@ def __init__(self, host, aws_access_key_id=None,
aws_secret_access_key,
security_token)
- # Allow config file to override default host and port.
+ # Allow config file to override default host, port, and host header.
if self.provider.host:
self.host = self.provider.host
if self.provider.port:
self.port = self.provider.port
+ if self.provider.host_header:
+ self.host_header = self.provider.host_header
self._pool = ConnectionPool()
self._connection = (self.server_name(), self.is_secure)
@@ -942,6 +945,9 @@ def build_base_http_request(self, method, path, auth_path,
headers = {}
else:
headers = headers.copy()
+ if (self.host_header and
+ not boto.utils.find_matching_headers('host', headers)):
+ headers['host'] = self.host_header
host = host or self.host
if self.use_proxy:
if not auth_path:
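
A standalone sketch (simplified, not the actual boto code) of what the new ``host_header`` plumbing does when the request is built: if the provider configuration supplies a host header and the caller has not already set one, it is injected into the outgoing headers.

    # Approximation of the logic added to build_base_http_request.
    def find_matching_headers(name, headers):
        # Mirrors boto.utils.find_matching_headers: case-insensitive lookup.
        return [h for h in headers if h.lower() == name.lower()]

    def apply_host_header(headers, host_header):
        headers = dict(headers or {})
        if host_header and not find_matching_headers('host', headers):
            headers['host'] = host_header
        return headers

    # A caller-supplied Host header wins; otherwise the configured one is used.
    assert apply_host_header({}, 's3.example.internal') == {'host': 's3.example.internal'}
    assert apply_host_header({'Host': 'proxy.local'}, 's3.example.internal') == {'Host': 'proxy.local'}
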
22 boto/dynamodb2/results.py
@@ -58,6 +58,12 @@ def next(self):
self.fetch_more()
+ # It's possible that previous call to ``fetch_more`` may not return
+ # anything useful but there may be more results. Loop until we get
+ # something back, making sure we guard for no results left.
+ while not len(self._results) and self._results_left:
+ self.fetch_more()
+
if self._offset < len(self._results):
return self._results[self._offset]
else:
@@ -106,16 +112,11 @@ def fetch_more(self):
kwargs[self.first_key] = self._last_key_seen
results = self.the_callable(*args, **kwargs)
-
- if not len(results.get('results', [])):
- self._results_left = False
- return
-
- self._results.extend(results['results'])
+ new_results = results.get('results', [])
self._last_key_seen = results.get('last_key', None)
- if self._last_key_seen is None:
- self._results_left = False
+ if len(new_results):
+ self._results.extend(results['results'])
# Decrease the limit, if it's present.
if self.call_kwargs.get('limit'):
@@ -124,7 +125,10 @@ def fetch_more(self):
# results to look for
if 0 == self.call_kwargs['limit']:
self._results_left = False
-
+
+ if self._last_key_seen is None:
+ self._results_left = False
+
class BatchGetResultSet(ResultSet):
def __init__(self, *args, **kwargs):
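
In plain terms: ``next()`` now keeps calling ``fetch_more`` while a page comes back empty but a ``last_key`` says more data may follow (the filtered-scan case from #1713), and iteration only stops once a page arrives without a ``last_key``. A rough standalone sketch of that paging rule, with made-up page data:

    # Not boto code; just the paging rule in isolation.
    pages = [
        {'results': ['a', 'b'], 'last_key': 'k1'},
        {'results': [], 'last_key': 'k2'},   # all items filtered out, but keep going
        {'results': ['c']},                  # final page: no last_key
    ]

    def iterate(pages):
        fetch = iter(pages)
        while True:
            page = next(fetch)
            for item in page.get('results', []):
                yield item
            if page.get('last_key') is None:
                return

    assert list(iterate(pages)) == ['a', 'b', 'c']
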
2  boto/ec2/autoscale/__init__.py
@@ -225,7 +225,7 @@ def create_launch_configuration(self, launch_config):
if launch_config.ramdisk_id:
params['RamdiskId'] = launch_config.ramdisk_id
if launch_config.block_device_mappings:
- [x.build_list_params(params) for x in launch_config.block_device_mappings]
+ [x.autoscale_build_list_params(params) for x in launch_config.block_device_mappings]
if launch_config.security_groups:
self.build_list_params(params, launch_config.security_groups,
'SecurityGroups')
12 boto/ec2/blockdevicemapping.py
@@ -115,10 +115,18 @@ def endElement(self, name, value, connection):
elif name == 'item':
self[self.current_name] = self.current_value
- def build_list_params(self, params, prefix=''):
+ def ec2_build_list_params(self, params, prefix=''):
+ pre = '%sBlockDeviceMapping' % prefix
+ return self._build_list_params(params, prefix=pre)
+
+ def autoscale_build_list_params(self, params, prefix=''):
+ pre = '%sBlockDeviceMappings.member' % prefix
+ return self._build_list_params(params, prefix=pre)
+
+ def _build_list_params(self, params, prefix=''):
i = 1
for dev_name in self:
- pre = '%sBlockDeviceMappings.member.%d' % (prefix, i)
+ pre = '%s.%d' % (prefix, i)
params['%s.DeviceName' % pre] = dev_name
block_dev = self[dev_name]
if block_dev.ephemeral_name:
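
The split into ``ec2_build_list_params`` and ``autoscale_build_list_params`` exists because the two services expect different wire parameter prefixes; the old single method used the Autoscaling-style prefix everywhere, which is what broke ``run_instances`` (#1723). A short sketch of the resulting parameter names (device and snapshot values are made up):

    from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType

    bdm = BlockDeviceMapping()
    bdm['/dev/sdf'] = BlockDeviceType(snapshot_id='snap-12345')

    ec2_params, as_params = {}, {}
    bdm.ec2_build_list_params(ec2_params)          # EC2: RunInstances, RegisterImage, ...
    bdm.autoscale_build_list_params(as_params)     # Autoscaling: CreateLaunchConfiguration

    assert 'BlockDeviceMapping.1.DeviceName' in ec2_params
    assert 'BlockDeviceMappings.member.1.DeviceName' in as_params
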
11 boto/ec2/connection.py
@@ -312,7 +312,7 @@ def register_image(self, name=None, description=None, image_location=None,
if root_device_name:
params['RootDeviceName'] = root_device_name
if block_device_map:
- block_device_map.build_list_params(params)
+ block_device_map.ec2_build_list_params(params)
if dry_run:
params['DryRun'] = 'true'
rs = self.get_object('RegisterImage', params, ResultSet, verb='POST')
@@ -843,7 +843,7 @@ def run_instances(self, image_id, min_count=1, max_count=1,
if private_ip_address:
params['PrivateIpAddress'] = private_ip_address
if block_device_map:
- block_device_map.build_list_params(params)
+ block_device_map.ec2_build_list_params(params)
if disable_api_termination:
params['DisableApiTermination'] = 'true'
if instance_initiated_shutdown_behavior:
@@ -1466,7 +1466,7 @@ def request_spot_instances(self, price, image_id, count=1, type='one-time',
if placement_group:
params['%s.Placement.GroupName' % ls] = placement_group
if block_device_map:
- block_device_map.build_list_params(params, '%s.' % ls)
+ block_device_map.ec2_build_list_params(params, '%s.' % ls)
if instance_profile_name:
params['%s.IamInstanceProfile.Name' % ls] = instance_profile_name
if instance_profile_arn:
@@ -4147,7 +4147,7 @@ def get_all_vmtypes(self):
params = {}
return self.get_list('DescribeVmTypes', params, [('euca:item', VmType)], verb='POST')
- def copy_image(self, source_region, source_image_id, name,
+ def copy_image(self, source_region, source_image_id, name=None,
description=None, client_token=None, dry_run=False):
"""
:type dry_run: bool
@@ -4157,8 +4157,9 @@ def copy_image(self, source_region, source_image_id, name,
params = {
'SourceRegion': source_region,
'SourceImageId': source_image_id,
- 'Name': name
}
+ if name is not None:
+ params['Name'] = name
if description is not None:
params['Description'] = description
if client_token is not None:
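
With ``name`` now optional, ``copy_image`` omits the ``Name`` parameter entirely when no name is given (#1672). A hedged usage sketch; the region, AMI id, and credentials are placeholders:

    import boto.ec2

    conn = boto.ec2.connect_to_region('us-east-1')   # needs real credentials

    # 'name' used to be a required positional argument; it can now be left out.
    copied = conn.copy_image('us-west-2', 'ami-12345678',
                             description='copy without an explicit name')
    print(copied.image_id)
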
12 boto/ec2/image.py
@@ -32,6 +32,15 @@ def endElement(self, name, value, connection):
if name == 'productCode':
self.append(value)
+class BillingProducts(list):
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'billingProduct':
+ self.append(value)
+
class Image(TaggedEC2Object):
"""
Represents an EC2 Image
@@ -54,6 +63,7 @@ def __init__(self, connection=None):
self.name = None
self.description = None
self.product_codes = ProductCodes()
+ self.billing_products = BillingProducts()
self.block_device_mapping = None
self.root_device_type = None
self.root_device_name = None
@@ -73,6 +83,8 @@ def startElement(self, name, attrs, connection):
return self.block_device_mapping
elif name == 'productCodes':
return self.product_codes
+ elif name == 'billingProducts':
+ return self.billing_products
else:
return None
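
After this change, ``billingProducts`` entries from DescribeImages parse into a plain list on the ``Image`` object, next to the existing ``product_codes``. A brief sketch (connection and AMI id are placeholders):

    import boto.ec2

    conn = boto.ec2.connect_to_region('us-east-1')   # needs real credentials
    image = conn.get_all_images(image_ids=['ami-abcd1234'])[0]

    print(image.billing_products)   # e.g. ['bp-6ba54002']
    print(image.product_codes)      # unchanged existing attribute
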
3  boto/ec2/securitygroup.py
@@ -348,7 +348,8 @@ def endElement(self, name, value, connection):
else:
setattr(self, name, value)
- def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None):
+ def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None,
+ dry_run=False):
grant = GroupOrCIDR(self)
grant.owner_id = owner_id
grant.group_id = group_id
5 boto/gs/key.py
@@ -308,9 +308,10 @@ def send_file(self, fp, headers=None, cb=None, num_cb=10,
chunked_transfer=chunked_transfer, size=size,
hash_algs=hash_algs)
- def delete(self):
+ def delete(self, headers=None):
return self.bucket.delete_key(self.name, version_id=self.version_id,
- generation=self.generation)
+ generation=self.generation,
+ headers=headers)
def add_email_grant(self, permission, email_address):
"""
4 boto/provider.py
@@ -168,6 +168,7 @@ def __init__(self, name, access_key=None, secret_key=None,
security_token=None):
self.host = None
self.port = None
+ self.host_header = None
self.access_key = access_key
self.secret_key = secret_key
self.security_token = security_token
@@ -185,6 +186,9 @@ def __init__(self, name, access_key=None, secret_key=None,
port_opt_name = '%s_port' % self.HostKeyMap[self.name]
if config.has_option('Credentials', port_opt_name):
self.port = config.getint('Credentials', port_opt_name)
+ host_header_opt_name = '%s_host_header' % self.HostKeyMap[self.name]
+ if config.has_option('Credentials', host_header_opt_name):
+ self.host_header = config.get('Credentials', host_header_opt_name)
def get_access_key(self):
if self._credentials_need_refresh():
18 boto/s3/key.py
@@ -503,20 +503,21 @@ def endElement(self, name, value, connection):
else:
setattr(self, name, value)
- def exists(self):
+ def exists(self, headers=None):
"""
Returns True if the key exists
:rtype: bool
:return: Whether the key exists on S3
"""
- return bool(self.bucket.lookup(self.name))
+ return bool(self.bucket.lookup(self.name, headers=headers))
- def delete(self):
+ def delete(self, headers=None):
"""
Delete this key from S3
"""
- return self.bucket.delete_key(self.name, version_id=self.version_id)
+ return self.bucket.delete_key(self.name, version_id=self.version_id,
+ headers=headers)
def get_metadata(self, name):
return self.metadata.get(name)
@@ -749,7 +750,14 @@ def sender(http_conn, method, path, data, headers):
raise provider.storage_data_error(
'Cannot retry failed request. fp does not support seeking.')
- http_conn.putrequest(method, path)
+ # If the caller explicitly specified host header, tell putrequest
+ # not to add a second host header. Similarly for accept-encoding.
+ skips = {}
+ if boto.utils.find_matching_headers('host', headers):
+ skips['skip_host'] = 1
+ if boto.utils.find_matching_headers('accept-encoding', headers):
+ skips['skip_accept_encoding'] = 1
+ http_conn.putrequest(method, path, **skips)
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
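
Two related changes meet here: ``exists``/``delete`` now forward optional headers to the underlying bucket calls, and ``sender`` tells httplib's ``putrequest`` (via its standard ``skip_host``/``skip_accept_encoding`` flags) not to add a second copy of a header the caller already supplied. A hedged usage sketch with placeholder bucket and key names:

    import boto
    from boto.s3.key import Key

    conn = boto.connect_s3()                      # needs real credentials
    bucket = conn.get_bucket('my-bucket')
    key = Key(bucket, 'path/to/object')

    extra = {'Host': 's3.example.internal'}       # illustrative custom header
    if key.exists(headers=extra):
        key.delete(headers=extra)
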
44 docs/source/apps_built_on_boto.rst
@@ -0,0 +1,44 @@
+.. _apps_built_on_boto:
+
+==========================
+Applications Built On Boto
+==========================
+
+Many people have taken Boto and layered on additional functionality, then shared
+them with the community. This is a (partial) list of applications that use Boto.
+
+If you have an application or utility you've open-sourced that uses Boto &
+you'd like it listed here, please submit a `pull request`_ adding it!
+
+.. _`pull request`: https://github.com/boto/boto/pulls
+
+**botornado**
+ https://pypi.python.org/pypi/botornado
+ An asynchronous AWS client on Tornado. This is a dirty work to move boto
+ onto Tornado ioloop. Currently works with SQS and S3.
+
+**boto_rsync**
+ https://pypi.python.org/pypi/boto_rsync
+ boto-rsync is a rough adaptation of boto's s3put script which has been
+ reengineered to more closely mimic rsync. Its goal is to provide a familiar
+ rsync-like wrapper for boto's S3 and Google Storage interfaces.
+
+**boto_utils**
+ https://pypi.python.org/pypi/boto_utils
+ Command-line tools for interacting with Amazon Web Services, based on Boto.
+ Includes utils for S3, SES & Cloudwatch.
+
+**django-storages**
+ https://pypi.python.org/pypi/django-storages
+ A collection of storage backends for Django. Features the ``S3BotoStorage``
+ backend for storing media on S3.
+
+**mr.awsome**
+ https://pypi.python.org/pypi/mr.awsome
+ mr.awsome is a commandline-tool (aws) to manage and control Amazon
+ Webservice's EC2 instances. Once configured with your AWS key, you can
+ create, delete, monitor and ssh into instances, as well as perform scripted
+ tasks on them (via fabfiles). Examples are adding additional,
+ pre-configured webservers to a cluster (including updating the load
+ balancer), performing automated software deployments and creating backups -
+ each with just one call from the commandline.
2  docs/source/index.rst
@@ -91,6 +91,7 @@ Currently Supported Services
Additional Resources
--------------------
+* :doc:`Applications Built On Boto <apps_built_on_boto>`
* :doc:`Command Line Utilities <commandline>`
* :doc:`Boto Config Tutorial <boto_config_tut>`
* :doc:`Contributing to Boto <contributing>`
@@ -170,6 +171,7 @@ Release Notes
support_tut
dynamodb2_tut
migrations/dynamodb_v1_to_v2
+ apps_built_on_boto
ref/*
releasenotes/*
31 docs/source/releasenotes/dev.rst
@@ -0,0 +1,31 @@
+boto v2.xx.x
+============
+
+:date: 2013/xx/xx
+
+This release adds ____.
+
+
+Features
+--------
+
+* . (:issue:``, :sha:``)
+
+
+Bugfixes
+--------
+
+* Fixed test fallout from the EC2 dry-run change. (:sha:`2159456`)
+* Added tests for more of SWF's ``layer2``. (:issue:`1718`, :sha:`35fb741`,
+ :sha:`a84d401`, :sha:`1cf1641`, :sha:`a36429c`)
+* Changed EC2 to allow ``name`` to be optional in calls to ``copy_image``.
+ (:issue:`1672`, :sha:` 26285aa`)
+* Added ``billingProducts`` support to EC2 ``Image``. (:issue:`1703`,
+ :sha:`cccadaf`, :sha:`3914e91`)
+* Fixed a place where ``dry_run`` was handled in EC2. (:issue:`1722`,
+ :sha:`0a52c82`)
+* Fixed ``run_instances`` with a block device mapping. (:issue:`1723`,
+ :sha:`974743f`, :sha:`9049f05`, :sha:`d7edafc`)
+* Several documentation improvements/fixes:
+
+ * Added the "Apps Built On Boto" doc. (:sha:`3bd628c`)
4 tests/integration/gs/test_resumable_uploads.py
@@ -308,7 +308,7 @@ def test_upload_retains_metadata(self):
Tests that resumable upload correctly sets passed metadata
"""
res_upload_handler = ResumableUploadHandler()
- headers = {'Content-Type' : 'text/plain', 'Content-Encoding' : 'gzip',
+ headers = {'Content-Type' : 'text/plain', 'Content-Encoding' : 'utf8',
'x-goog-meta-abc' : 'my meta', 'x-goog-acl' : 'public-read'}
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
@@ -321,7 +321,7 @@ def test_upload_retains_metadata(self):
dst_key.get_contents_as_string())
dst_key.open_read()
self.assertEqual('text/plain', dst_key.content_type)
- self.assertEqual('gzip', dst_key.content_encoding)
+ self.assertEqual('utf8', dst_key.content_encoding)
self.assertTrue('abc' in dst_key.metadata)
self.assertEqual('my meta', str(dst_key.metadata['abc']))
acl = dst_key.get_acl()
54 tests/unit/dynamodb2/test_table.py
@@ -797,6 +797,60 @@ def none(limit=10):
results.to_call(none, limit=20)
self.assertRaises(StopIteration, results.next)
+ def test_iteration_sporadic_pages(self):
+ # Some pages have no/incomplete results but have a ``LastEvaluatedKey``
+ # (for instance, scans with filters), so we need to accommodate that.
+ def sporadic():
+ # A dict, because Python closures have read-only access to the
+ # reference itself.
+ count = {'value': -1}
+
+ def _wrapper(limit=10, exclusive_start_key=None):
+ count['value'] = count['value'] + 1
+
+ if count['value'] == 0:
+ # Full page.
+ return {
+ 'results': [
+ 'Result #0',
+ 'Result #1',
+ 'Result #2',
+ 'Result #3',
+ ],
+ 'last_key': 'page-1'
+ }
+ elif count['value'] == 1:
+ # Empty page but continue.
+ return {
+ 'results': [],
+ 'last_key': 'page-2'
+ }
+ elif count['value'] == 2:
+ # Final page.
+ return {
+ 'results': [
+ 'Result #4',
+ 'Result #5',
+ 'Result #6',
+ ],
+ }
+
+ return _wrapper
+
+ results = ResultSet()
+ results.to_call(sporadic(), limit=20)
+ # First page
+ self.assertEqual(results.next(), 'Result #0')
+ self.assertEqual(results.next(), 'Result #1')
+ self.assertEqual(results.next(), 'Result #2')
+ self.assertEqual(results.next(), 'Result #3')
+ # Second page (misses!)
+ # Moves on to the third page
+ self.assertEqual(results.next(), 'Result #4')
+ self.assertEqual(results.next(), 'Result #5')
+ self.assertEqual(results.next(), 'Result #6')
+ self.assertRaises(StopIteration, results.next)
+
def test_list(self):
self.assertEqual(list(self.results), [
'Hello john #0',
16 tests/unit/ec2/test_address.py
@@ -25,15 +25,25 @@ def test_endElement_sets_correct_attributes_with_values(self):
def test_release_calls_connection_release_address_with_correct_args(self):
self.address.release()
- self.address.connection.release_address.assert_called_with("192.168.1.1")
+ self.address.connection.release_address.assert_called_with(
+ "192.168.1.1",
+ dry_run=False
+ )
def test_associate_calls_connection_associate_address_with_correct_args(self):
self.address.associate(1)
- self.address.connection.associate_address.assert_called_with(1, "192.168.1.1")
+ self.address.connection.associate_address.assert_called_with(
+ 1,
+ "192.168.1.1",
+ dry_run=False
+ )
def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
self.address.disassociate()
- self.address.connection.disassociate_address.assert_called_with("192.168.1.1")
+ self.address.connection.disassociate_address.assert_called_with(
+ "192.168.1.1",
+ dry_run=False
+ )
if __name__ == "__main__":
unittest.main()
54 tests/unit/ec2/test_blockdevicemapping.py
@@ -1,8 +1,12 @@
import mock
import unittest
+from boto.ec2.connection import EC2Connection
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
+from tests.unit import AWSMockServiceTestCase
+
+
class BlockDeviceTypeTests(unittest.TestCase):
def setUp(self):
self.block_device_type = BlockDeviceType()
@@ -75,5 +79,55 @@ def test_endElement_with_name_item_sets_current_name_key_to_current_value(self):
self.block_device_mapping.endElement("item", "some item", None)
self.assertEqual(self.block_device_mapping["some name"], "some value")
+
+class TestLaunchConfiguration(AWSMockServiceTestCase):
+ connection_class = EC2Connection
+
+ def default_body(self):
+ # This is a dummy response
+ return """
+ <DescribeLaunchConfigurationsResponse>
+ </DescribeLaunchConfigurationsResponse>
+ """
+
+ def test_run_instances_block_device_mapping(self):
+ # Same as the test in ``unit/ec2/autoscale/test_group.py:TestLaunchConfiguration``,
+ # but with modified request parameters (due to a mismatch between EC2 &
+ # Autoscaling).
+ self.set_http_response(status_code=200)
+ dev_sdf = BlockDeviceType(snapshot_id='snap-12345')
+ dev_sdg = BlockDeviceType(snapshot_id='snap-12346')
+
+ bdm = BlockDeviceMapping()
+ bdm['/dev/sdf'] = dev_sdf
+ bdm['/dev/sdg'] = dev_sdg
+
+ response = self.service_connection.run_instances(
+ image_id='123456',
+ instance_type='m1.large',
+ security_groups=['group1', 'group2'],
+ block_device_map=bdm
+ )
+
+ self.assert_request_parameters({
+ 'Action': 'RunInstances',
+ 'BlockDeviceMapping.1.DeviceName': '/dev/sdf',
+ 'BlockDeviceMapping.1.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMapping.1.Ebs.SnapshotId': 'snap-12345',
+ 'BlockDeviceMapping.2.DeviceName': '/dev/sdg',
+ 'BlockDeviceMapping.2.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMapping.2.Ebs.SnapshotId': 'snap-12346',
+ 'ImageId': '123456',
+ 'InstanceType': 'm1.large',
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroup.1': 'group1',
+ 'SecurityGroup.2': 'group2',
+ }, ignore_params_values=[
+ 'Version', 'AWSAccessKeyId', 'SignatureMethod', 'SignatureVersion',
+ 'Timestamp'
+ ])
+
+
if __name__ == "__main__":
unittest.main()
132 tests/unit/ec2/test_connection.py
@@ -484,6 +484,47 @@ def test_copy_snapshot(self):
'SignatureVersion', 'Timestamp',
'Version'])
+class TestCopyImage(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+ <CopyImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-07-15/">
+ <requestId>request_id</requestId>
+ <imageId>ami-copied-id</imageId>
+ </CopyImageResponse>
+ """
+
+ def test_copy_image(self):
+ self.set_http_response(status_code=200)
+ copied_ami = self.ec2.copy_image('us-west-2', 'ami-id',
+ 'name', 'description', 'client-token')
+ self.assertEqual(copied_ami.image_id, 'ami-copied-id')
+
+ self.assert_request_parameters({
+ 'Action': 'CopyImage',
+ 'Description': 'description',
+ 'Name': 'name',
+ 'SourceRegion': 'us-west-2',
+ 'SourceImageId': 'ami-id',
+ 'ClientToken': 'client-token'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+ def test_copy_image_without_name(self):
+ self.set_http_response(status_code=200)
+ copied_ami = self.ec2.copy_image('us-west-2', 'ami-id',
+ description='description',
+ client_token='client-token')
+ self.assertEqual(copied_ami.image_id, 'ami-copied-id')
+
+ self.assert_request_parameters({
+ 'Action': 'CopyImage',
+ 'Description': 'description',
+ 'SourceRegion': 'us-west-2',
+ 'SourceImageId': 'ami-id',
+ 'ClientToken': 'client-token'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
class TestAccountAttributes(TestEC2ConnectionBase):
def default_body(self):
@@ -630,6 +671,97 @@ def test_attachment_has_device_index(self):
self.assertEqual(5, parsed[0].attachment.device_index)
+class TestGetAllImages(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+<DescribeImagesResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
+ <requestId>e32375e8-4ac3-4099-a8bf-3ec902b9023e</requestId>
+ <imagesSet>
+ <item>
+ <imageId>ami-abcd1234</imageId>
+ <imageLocation>111111111111/windows2008r2-hvm-i386-20130702</imageLocation>
+ <imageState>available</imageState>
+ <imageOwnerId>111111111111</imageOwnerId>
+ <isPublic>false</isPublic>
+ <architecture>i386</architecture>
+ <imageType>machine</imageType>
+ <platform>windows</platform>
+ <viridianEnabled>true</viridianEnabled>
+ <name>Windows Test</name>
+ <description>Windows Test Description</description>
+ <billingProducts>
+ <item>
+ <billingProduct>bp-6ba54002</billingProduct>
+ </item>
+ </billingProducts>
+ <rootDeviceType>ebs</rootDeviceType>
+ <rootDeviceName>/dev/sda1</rootDeviceName>
+ <blockDeviceMapping>
+ <item>
+ <deviceName>/dev/sda1</deviceName>
+ <ebs>
+ <snapshotId>snap-abcd1234</snapshotId>
+ <volumeSize>30</volumeSize>
+ <deleteOnTermination>true</deleteOnTermination>
+ <volumeType>standard</volumeType>
+ </ebs>
+ </item>
+ <item>
+ <deviceName>xvdb</deviceName>
+ <virtualName>ephemeral0</virtualName>
+ </item>
+ <item>
+ <deviceName>xvdc</deviceName>
+ <virtualName>ephemeral1</virtualName>
+ </item>
+ <item>
+ <deviceName>xvdd</deviceName>
+ <virtualName>ephemeral2</virtualName>
+ </item>
+ <item>
+ <deviceName>xvde</deviceName>
+ <virtualName>ephemeral3</virtualName>
+ </item>
+ </blockDeviceMapping>
+ <virtualizationType>hvm</virtualizationType>
+ <hypervisor>xen</hypervisor>
+ </item>
+ </imagesSet>
+</DescribeImagesResponse>"""
+
+ def test_get_all_images(self):
+ self.set_http_response(status_code=200)
+ parsed = self.ec2.get_all_images()
+ self.assertEquals(1, len(parsed))
+ self.assertEquals("ami-abcd1234", parsed[0].id)
+ self.assertEquals("111111111111/windows2008r2-hvm-i386-20130702", parsed[0].location)
+ self.assertEquals("available", parsed[0].state)
+ self.assertEquals("111111111111", parsed[0].ownerId)
+ self.assertEquals("111111111111", parsed[0].owner_id)
+ self.assertEquals(False, parsed[0].is_public)
+ self.assertEquals("i386", parsed[0].architecture)
+ self.assertEquals("machine", parsed[0].type)
+ self.assertEquals(None, parsed[0].kernel_id)
+ self.assertEquals(None, parsed[0].ramdisk_id)
+ self.assertEquals(None, parsed[0].owner_alias)
+ self.assertEquals("windows", parsed[0].platform)
+ self.assertEquals("Windows Test", parsed[0].name)
+ self.assertEquals("Windows Test Description", parsed[0].description)
+ self.assertEquals("ebs", parsed[0].root_device_type)
+ self.assertEquals("/dev/sda1", parsed[0].root_device_name)
+ self.assertEquals("hvm", parsed[0].virtualization_type)
+ self.assertEquals("xen", parsed[0].hypervisor)
+ self.assertEquals(None, parsed[0].instance_lifecycle)
+
+ # 1 billing product parsed into a list
+ self.assertEquals(1, len(parsed[0].billing_products))
+ self.assertEquals("bp-6ba54002", parsed[0].billing_products[0])
+
+ # Just verify length, there is already a block_device_mapping test
+ self.assertEquals(5, len(parsed[0].block_device_mapping))
+
+ # TODO: No tests for product codes?
+
class TestModifyInterfaceAttribute(TestEC2ConnectionBase):
def default_body(self):
22 tests/unit/ec2/test_securitygroup.py
@@ -6,6 +6,8 @@
import mock
from boto.ec2.connection import EC2Connection
+from boto.ec2.securitygroup import SecurityGroup
+
DESCRIBE_SECURITY_GROUP = r"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeSecurityGroupsResponse xmlns="http://ec2.amazonaws.com/doc/2013-06-15/">
@@ -182,3 +184,23 @@ def test_get_instances(self):
self.assertEqual(1, len(instances))
self.assertEqual(groups[0].id, instances[0].groups[0].id)
+
+
+class SecurityGroupTest(unittest.TestCase):
+ def test_add_rule(self):
+ sg = SecurityGroup()
+ self.assertEqual(len(sg.rules), 0)
+
+ # Regression: ``dry_run`` was being passed (but unhandled) before.
+ sg.add_rule(
+ ip_protocol='http',
+ from_port='80',
+ to_port='8080',
+ src_group_name='groupy',
+ src_group_owner_id='12345',
+ cidr_ip='10.0.0.1',
+ src_group_group_id='54321',
+ dry_run=False
+ )
+ self.assertEqual(len(sg.rules), 1)
+
37 tests/unit/ec2/test_volume.py
@@ -38,7 +38,12 @@ def setUp(self):
def test_startElement_calls_TaggedEC2Object_startElement_with_correct_args(self, startElement):
volume = Volume()
volume.startElement("some name", "some attrs", None)
- startElement.assert_called_with(volume, "some name", "some attrs", None)
+ startElement.assert_called_with(
+ volume,
+ "some name",
+ "some attrs",
+ None
+ )
@mock.patch("boto.ec2.volume.TaggedEC2Object.startElement")
def test_startElement_retval_not_None_returns_correct_thing(self, startElement):
@@ -120,43 +125,57 @@ def test_update_returns_status(self):
def test_delete_calls_delete_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.delete()
- self.volume_one.connection.delete_volume.assert_called_with(1)
+ self.volume_one.connection.delete_volume.assert_called_with(
+ 1,
+ dry_run=False
+ )
def test_attach_calls_attach_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.attach("instance_id", "/dev/null")
- self.volume_one.connection.attach_volume.assert_called_with(1, "instance_id", "/dev/null")
+ self.volume_one.connection.attach_volume.assert_called_with(
+ 1,
+ "instance_id",
+ "/dev/null",
+ dry_run=False
+ )
def test_detach_calls_detach_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.detach()
self.volume_one.connection.detach_volume.assert_called_with(
- 1, 2, "/dev/null", False)
+ 1, 2, "/dev/null", False, dry_run=False)
def test_detach_with_no_attach_data(self):
self.volume_two.connection = mock.Mock()
self.volume_two.detach()
self.volume_two.connection.detach_volume.assert_called_with(
- 1, None, None, False)
+ 1, None, None, False, dry_run=False)
def test_detach_with_force_calls_detach_volume_with_force(self):
self.volume_one.connection = mock.Mock()
self.volume_one.detach(True)
self.volume_one.connection.detach_volume.assert_called_with(
- 1, 2, "/dev/null", True)
+ 1, 2, "/dev/null", True, dry_run=False)
def test_create_snapshot_calls_connection_create_snapshot(self):
self.volume_one.connection = mock.Mock()
self.volume_one.create_snapshot()
self.volume_one.connection.create_snapshot.assert_called_with(
- 1, None)
+ 1,
+ None,
+ dry_run=False
+ )
def test_create_snapshot_with_description(self):
self.volume_one.connection = mock.Mock()
self.volume_one.create_snapshot("some description")
self.volume_one.connection.create_snapshot.assert_called_with(
- 1, "some description")
+ 1,
+ "some description",
+ dry_run=False
+ )
def test_volume_state_returns_status(self):
retval = self.volume_one.volume_state()
@@ -186,7 +205,7 @@ def test_snapshots__with_owner_and_restorable_by(self):
self.volume_one.connection.get_all_snapshots.return_value = []
self.volume_one.snapshots("owner", "restorable_by")
self.volume_one.connection.get_all_snapshots.assert_called_with(
- owner="owner", restorable_by="restorable_by")
+ owner="owner", restorable_by="restorable_by", dry_run=False)
class AttachmentSetTests(unittest.TestCase):
def check_that_attribute_has_been_set(self, name, value, attribute):
0  tests/unit/swf/__init__.py
No changes.
73 tests/unit/swf/test_layer2_actors.py
@@ -0,0 +1,73 @@
+import boto.swf.layer2
+from boto.swf.layer2 import Decider, ActivityWorker
+from tests.unit import unittest
+from mock import Mock
+
+
+class TestActors(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+ self.worker = ActivityWorker(name='test-worker', domain='test', task_list='test_list')
+ self.decider = Decider(name='test-worker', domain='test', task_list='test_list')
+ self.worker._swf = Mock()
+ self.decider._swf = Mock()
+
+ def test_decider_pass_tasktoken(self):
+ self.decider._swf.poll_for_decision_task.return_value = {
+ 'events': [{'eventId': 1,
+ 'eventTimestamp': 1379019427.953,
+ 'eventType': 'WorkflowExecutionStarted',
+ 'workflowExecutionStartedEventAttributes': {
+ 'childPolicy': 'TERMINATE',
+ 'executionStartToCloseTimeout': '3600',
+ 'parentInitiatedEventId': 0,
+ 'taskList': {'name': 'test_list'},
+ 'taskStartToCloseTimeout': '123',
+ 'workflowType': {'name': 'test_workflow_name',
+ 'version': 'v1'}}},
+ {'decisionTaskScheduledEventAttributes':
+ {'startToCloseTimeout': '123',
+ 'taskList': {'name': 'test_list'}},
+ 'eventId': 2,
+ 'eventTimestamp': 1379019427.953,
+ 'eventType': 'DecisionTaskScheduled'},
+ {'decisionTaskStartedEventAttributes': {'scheduledEventId': 2},
+ 'eventId': 3, 'eventTimestamp': 1379019495.585,
+ 'eventType': 'DecisionTaskStarted'}],
+ 'previousStartedEventId': 0, 'startedEventId': 3,
+ 'taskToken': 'my_specific_task_token',
+ 'workflowExecution': {'runId': 'fwr243dsa324132jmflkfu0943tr09=',
+ 'workflowId': 'test_workflow_name-v1-1379019427'},
+ 'workflowType': {'name': 'test_workflow_name', 'version': 'v1'}}
+
+ self.decider.poll()
+ self.decider.complete()
+
+ self.decider._swf.respond_decision_task_completed.assert_called_with('my_specific_task_token', None)
+ self.assertEqual('my_specific_task_token', self.decider.last_tasktoken)
+
+ def test_worker_pass_tasktoken(self):
+ task_token = 'worker_task_token'
+ self.worker._swf.poll_for_activity_task.return_value = {
+ 'activityId': 'SomeActivity-1379020713',
+ 'activityType': {'name': 'SomeActivity', 'version': '1.0'},
+ 'startedEventId': 6,
+ 'taskToken': task_token,
+ 'workflowExecution': {'runId': '12T026NzGK5c4eMti06N9O3GHFuTDaNyA+8LFtoDkAwfE=',
+ 'workflowId': 'MyWorkflow-1.0-1379020705'}}
+
+ self.worker.poll()
+
+ self.worker.cancel(details='Cancelling!')
+ self.worker.complete(result='Done!')
+ self.worker.fail(reason='Failure!')
+ self.worker.heartbeat()
+
+ self.worker._swf.respond_activity_task_canceled.assert_called_with(task_token, 'Cancelling!')
+ self.worker._swf.respond_activity_task_completed.assert_called_with(task_token, 'Done!')
+ self.worker._swf.respond_activity_task_failed.assert_called_with(task_token, None, 'Failure!')
+ self.worker._swf.record_activity_task_heartbeat.assert_called_with(task_token, None)
+
+if __name__ == '__main__':
+ unittest.main()
112 tests/unit/swf/test_layer2_domain.py
@@ -0,0 +1,112 @@
+import boto.swf.layer2
+from boto.swf.layer2 import Domain, ActivityType, WorkflowType, WorkflowExecution
+from tests.unit import unittest
+from mock import Mock
+
+
+class TestDomain(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+ self.domain = Domain(name='test-domain', description='My test domain')
+ self.domain.aws_access_key_id = 'inheritable access key'
+ self.domain.aws_secret_access_key = 'inheritable secret key'
+
+ def test_domain_instantiation(self):
+ self.assertEquals('test-domain', self.domain.name)
+ self.assertEquals('My test domain', self.domain.description)
+
+ def test_domain_list_activities(self):
+ self.domain._swf.list_activity_types.return_value = {
+ 'typeInfos': [{'activityType': {'name': 'DeleteLocalFile',
+ 'version': '1.0'},
+ 'creationDate': 1332853651.235,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'DoUpdate', 'version': 'test'},
+ 'creationDate': 1333463734.528,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'GrayscaleTransform',
+ 'version': '1.0'},
+ 'creationDate': 1332853651.18,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'S3Download', 'version': '1.0'},
+ 'creationDate': 1332853651.264,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'S3Upload', 'version': '1.0'},
+ 'creationDate': 1332853651.314,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'SepiaTransform', 'version': '1.1'},
+ 'creationDate': 1333373797.734,
+ 'status': 'REGISTERED'}]}
+
+ expected_names = ('DeleteLocalFile', 'GrayscaleTransform', 'S3Download',
+ 'S3Upload', 'SepiaTransform', 'DoUpdate')
+
+ activity_types = self.domain.activities()
+ self.assertEquals(6, len(activity_types))
+ for activity_type in activity_types:
+ self.assertIsInstance(activity_type, ActivityType)
+ self.assertTrue(activity_type.name in expected_names)
+
+ def test_domain_list_workflows(self):
+ self.domain._swf.list_workflow_types.return_value = {
+ 'typeInfos': [{'creationDate': 1332853651.136,
+ 'description': 'Image processing sample workflow type',
+ 'status': 'REGISTERED',
+ 'workflowType': {'name': 'ProcessFile', 'version': '1.0'}},
+ {'creationDate': 1333551719.89,
+ 'status': 'REGISTERED',
+ 'workflowType': {'name': 'test_workflow_name',
+ 'version': 'v1'}}]}
+ expected_names = ('ProcessFile', 'test_workflow_name')
+
+ workflow_types = self.domain.workflows()
+ self.assertEquals(2, len(workflow_types))
+ for workflow_type in workflow_types:
+ self.assertIsInstance(workflow_type, WorkflowType)
+ self.assertTrue(workflow_type.name in expected_names)
+ self.assertEquals(self.domain.aws_access_key_id, workflow_type.aws_access_key_id)
+ self.assertEquals(self.domain.aws_secret_access_key, workflow_type.aws_secret_access_key)
+ self.assertEquals(self.domain.name, workflow_type.domain)
+
+ def test_domain_list_executions(self):
+ self.domain._swf.list_open_workflow_executions.return_value = {
+ 'executionInfos': [{'cancelRequested': False,
+ 'execution': {'runId': '12OeDTyoD27TDaafViz/QIlCHrYzspZmDgj0coIfjm868=',
+ 'workflowId': 'ProcessFile-1.0-1378933928'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933928.676,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12GwBkx4hH6t2yaIh8LYxy5HyCM6HcyhDKePJCg0/ciJk=',
+ 'workflowId': 'ProcessFile-1.0-1378933927'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933927.919,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12oRG3vEWrQ7oYBV+Bqi33Fht+ZRCYTt+tOdn5kLVcwKI=',
+ 'workflowId': 'ProcessFile-1.0-1378933926'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933927.04,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12qrdcpYmad2cjnqJcM4Njm3qrCGvmRFR1wwQEt+a2ako=',
+ 'workflowId': 'ProcessFile-1.0-1378933874'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933874.956,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}}]}
+
+ executions = self.domain.executions()
+ self.assertEquals(4, len(executions))
+ for wf_execution in executions:
+ self.assertIsInstance(wf_execution, WorkflowExecution)
+ self.assertEquals(self.domain.aws_access_key_id, wf_execution.aws_access_key_id)
+ self.assertEquals(self.domain.aws_secret_access_key, wf_execution.aws_secret_access_key)
+ self.assertEquals(self.domain.name, wf_execution.domain)
+
+if __name__ == '__main__':
+ unittest.main()
46 tests/unit/swf/test_layer2_types.py
@@ -0,0 +1,46 @@
+import boto.swf.layer2
+from boto.swf.layer2 import ActivityType, WorkflowType, WorkflowExecution
+from tests.unit import unittest
+from mock import Mock, ANY
+
+
+class TestTypes(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+
+ def test_workflow_type_register_defaults(self):
+ wf_type = WorkflowType(name='name', domain='test', version='1')
+ wf_type.register()
+
+ wf_type._swf.register_workflow_type.assert_called_with('test', 'name', '1',
+ default_execution_start_to_close_timeout=ANY,
+ default_task_start_to_close_timeout=ANY,
+ default_child_policy=ANY
+ )
+
+ def test_activity_type_register_defaults(self):
+ act_type = ActivityType(name='name', domain='test', version='1')
+ act_type.register()
+
+ act_type._swf.register_activity_type.assert_called_with('test', 'name', '1',
+ default_task_heartbeat_timeout=ANY,
+ default_task_schedule_to_close_timeout=ANY,
+ default_task_schedule_to_start_timeout=ANY,
+ default_task_start_to_close_timeout=ANY
+ )
+
+ def test_workflow_type_start_execution(self):
+ wf_type = WorkflowType(name='name', domain='test', version='1')
+ run_id = '122aJcg6ic7MRAkjDRzLBsqU/R49qt5D0LPHycT/6ArN4='
+ wf_type._swf.start_workflow_execution.return_value = {'runId': run_id}
+
+ execution = wf_type.start(task_list='hello_world')
+
+ self.assertIsInstance(execution, WorkflowExecution)
+ self.assertEquals(wf_type.name, execution.name)
+ self.assertEquals(wf_type.version, execution.version)
+ self.assertEquals(run_id, execution.runId)
+
+if __name__ == '__main__':
+ unittest.main()