Merge branch 'release-2.9.1'
* release-2.9.1:
  Bumping version to 2.9.1
  Support docs & release notes.
  Initial Support API addition.
  Change num_retries default for resumable download handler to be 6, to be consistent w/ num_retries elsewhere in the code
  Added a ``connect_redshift`` function for easier access to a ``RedShiftConnection``.
  Fixed the error type checking.
  Updated DynamoDB v2 to incorporate retries & checksums.
  Allow port override in boto config
  Fix typo bug in autoscale tutorial.
  Trying to make the docs around the count param a bit more clear.
  Add clarifying comment about using OrdinaryCallingFormat in storage_uri.
  Fixed missing raise introduced by 57a4189 (fixes resumable download test failures)
  Change calling_format override in storage_uri to be gs-specific.
  Add eu-west-1 endpoint for Redshift.
  Fixing bogus docs regarding return value of import_key_pair.
  Added back get_upload_id().
  Bumped the version in README.
  Add dev prefix back to version in dev branch
jamesls committed May 1, 2013
2 parents 89f4947 + 38f8bec commit a9e834a
Showing 25 changed files with 1,136 additions and 26 deletions.
4 changes: 2 additions & 2 deletions README.rst
@@ -1,8 +1,8 @@
####
boto
####
-boto 2.8.0
-31-Jan-2013
+boto 2.9.1
+30-Apr-2013

.. image:: https://secure.travis-ci.org/boto/boto.png?branch=develop
        :target: https://secure.travis-ci.org/boto/boto
44 changes: 43 additions & 1 deletion boto/__init__.py
@@ -36,7 +36,7 @@
import urlparse
from boto.exception import InvalidUriError

-__version__ = '2.9.0'
+__version__ = '2.9.1'
Version = __version__  # for backward compatibility

UserAgent = 'Boto/%s (%s)' % (__version__, sys.platform)
@@ -674,6 +674,48 @@ def connect_opsworks(aws_access_key_id=None,
**kwargs)


def connect_redshift(aws_access_key_id=None,
                     aws_secret_access_key=None,
                     **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.redshift.layer1.RedshiftConnection`
    :return: A connection to Amazon's Redshift service
    """
    from boto.redshift.layer1 import RedshiftConnection
    return RedshiftConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )


def connect_support(aws_access_key_id=None,
                    aws_secret_access_key=None,
                    **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.support.layer1.SupportConnection`
    :return: A connection to Amazon's Support service
    """
    from boto.support.layer1 import SupportConnection
    return SupportConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )


def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
                bucket_storage_uri_class=BucketStorageUri,
                suppress_consec_slashes=True, is_latest=False):
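Both helpers follow the pattern of boto's other ``connect_*`` shortcuts, deferring the service import until call time. A minimal usage sketch (the credential values are placeholders; both constructors also pick up credentials from the environment or the boto config file if the arguments are omitted):

import boto

# Forwarded **kwargs go straight through to the layer1 connection classes.
redshift = boto.connect_redshift(
    aws_access_key_id='<access-key>',
    aws_secret_access_key='<secret-key>')
print redshift.describe_clusters()

# Same shape for the new Support API.
support = boto.connect_support(
    aws_access_key_id='<access-key>',
    aws_secret_access_key='<secret-key>')
print support.describe_services()
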
4 changes: 3 additions & 1 deletion boto/connection.py
@@ -539,9 +539,11 @@ def __init__(self, host, aws_access_key_id=None,
                                 aws_secret_access_key,
                                 security_token)

-        # allow config file to override default host
+        # Allow config file to override default host and port.
        if self.provider.host:
            self.host = self.provider.host
+        if self.provider.port:
+            self.port = self.provider.port

        self._pool = ConnectionPool()
        self._connection = (self.server_name(), self.is_secure)
6 changes: 6 additions & 0 deletions boto/dynamodb/layer2.py
@@ -681,6 +681,9 @@ def query(self, table, hash_key, range_key_condition=None,
:param count: If True, Amazon DynamoDB returns a total
number of items for the Query operation, even if the
operation has no matching items for the assigned filter.
If count is True, the actual items are not returned and
the count is accessible as the ``count`` attribute of
the returned object.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
@@ -769,6 +772,9 @@ def scan(self, table, scan_filter=None,
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
If count is True, the actual items are not returned and
the count is accessible as the ``count`` attribute of
the returned object.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
6 changes: 6 additions & 0 deletions boto/dynamodb/table.py
@@ -435,6 +435,9 @@ def query(self, hash_key, *args, **kw):
:param count: If True, Amazon DynamoDB returns a total
number of items for the Query operation, even if the
operation has no matching items for the assigned filter.
If count is True, the actual items are not returned and
the count is accessible as the ``count`` attribute of
the returned object.
:type item_class: Class
@@ -494,6 +497,9 @@ def scan(self, *args, **kw):
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
If count is True, the actual items are not returned and
the count is accessible as the ``count`` attribute of
the returned object.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
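The clarified ``count`` behavior applies to both the layer2 and ``Table`` flavors of ``query``/``scan``. A short sketch, assuming configured credentials and a hypothetical existing table named 'mytable':

import boto

conn = boto.connect_dynamodb()
table = conn.get_table('mytable')

# With count=True, no items come back; only the total is populated,
# exposed as the ``count`` attribute of the returned object.
results = table.scan(count=True)
print results.count
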
4 changes: 4 additions & 0 deletions boto/dynamodb2/exceptions.py
@@ -46,5 +46,9 @@ class InternalServerError(JSONResponseError):
    pass


class ValidationException(JSONResponseError):
    pass


class ItemCollectionSizeLimitExceededException(JSONResponseError):
    pass
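Once registered in the connection's fault map (see the layer1.py change below), the new exception surfaces as a catchable class. A sketch using a hypothetical 'mytable' table and a deliberately malformed item:

from boto.dynamodb2.exceptions import ValidationException
from boto.dynamodb2.layer1 import DynamoDBConnection

conn = DynamoDBConnection()
try:
    # An empty item is missing its key attributes, so the service
    # answers with a 400 whose __type ends in ValidationException.
    conn.put_item('mytable', item={})
except ValidationException, e:
    print e.status, e.reason
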
65 changes: 61 additions & 4 deletions boto/dynamodb2/layer1.py
@@ -19,6 +19,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from binascii import crc32

import json
import boto
@@ -30,9 +31,10 @@

class DynamoDBConnection(AWSQueryConnection):
    """
-    Amazon DynamoDB **Overview**
-    This is the Amazon DynamoDB API Reference. This guide provides
-    descriptions and samples of the Amazon DynamoDB API.
+    Amazon DynamoDB is a fast, highly scalable, highly available,
+    cost-effective non-relational database service. Amazon DynamoDB
+    removes traditional scalability limitations on data storage while
+    maintaining low latency and predictable performance.
    """
    APIVersion = "2012-08-10"
    DefaultRegionName = "us-east-1"
@@ -49,17 +51,23 @@ class DynamoDBConnection(AWSQueryConnection):
        "ResourceNotFoundException": exceptions.ResourceNotFoundException,
        "InternalServerError": exceptions.InternalServerError,
        "ItemCollectionSizeLimitExceededException": exceptions.ItemCollectionSizeLimitExceededException,
        "ValidationException": exceptions.ValidationException,
    }

    NumberRetries = 10

    def __init__(self, **kwargs):
        region = kwargs.pop('region', None)
        validate_checksums = kwargs.pop('validate_checksums', True)
        if not region:
            region = RegionInfo(self, self.DefaultRegionName,
                                self.DefaultRegionEndpoint)
        kwargs['host'] = region.endpoint
        AWSQueryConnection.__init__(self, **kwargs)
        self.region = region
        self._validate_checksums = boto.config.getbool(
            'DynamoDB', 'validate_checksums', validate_checksums)
        # Needed by _retry_handler below, which increments this counter.
        self.throughput_exceeded_events = 0

    def _required_auth_capability(self):
        return ['hmac-v4']
@@ -1392,7 +1400,8 @@ def make_request(self, action, body):
            method='POST', path='/', auth_path='/', params={},
            headers=headers, data=body)
        response = self._mexe(http_request, sender=None,
-                              override_num_retries=10)
+                              override_num_retries=self.NumberRetries,
+                              retry_handler=self._retry_handler)
        response_body = response.read()
        boto.log.debug(response_body)
        if response.status == 200:
@@ -1405,3 +1414,51 @@ def make_request(self, action, body):
            raise exception_class(response.status, response.reason,
                                  body=json_body)

    def _retry_handler(self, response, i, next_sleep):
        status = None
        if response.status == 400:
            response_body = response.read()
            boto.log.debug(response_body)
            data = json.loads(response_body)
            if 'ProvisionedThroughputExceededException' in data.get('__type'):
                self.throughput_exceeded_events += 1
                msg = "%s, retry attempt %s" % (
                    'ProvisionedThroughputExceededException',
                    i
                )
                next_sleep = self._exponential_time(i)
                i += 1
                status = (msg, i, next_sleep)
                if i == self.NumberRetries:
                    # If this was our last retry attempt, raise
                    # a specific error saying that the throughput
                    # was exceeded.
                    raise exceptions.ProvisionedThroughputExceededException(
                        response.status, response.reason, data)
            elif 'ConditionalCheckFailedException' in data.get('__type'):
                raise exceptions.ConditionalCheckFailedException(
                    response.status, response.reason, data)
            elif 'ValidationException' in data.get('__type'):
                raise exceptions.ValidationException(
                    response.status, response.reason, data)
            else:
                raise self.ResponseError(response.status, response.reason,
                                         data)
        expected_crc32 = response.getheader('x-amz-crc32')
        if self._validate_checksums and expected_crc32 is not None:
            # Read the body once and reuse it; calling response.read() a
            # second time would return an empty string and break the check.
            response_body = response.read()
            boto.log.debug('Validating crc32 checksum for body: %s',
                           response_body)
            actual_crc32 = crc32(response_body) & 0xffffffff
            expected_crc32 = int(expected_crc32)
            if actual_crc32 != expected_crc32:
                msg = ("The calculated checksum %s did not match the expected "
                       "checksum %s" % (actual_crc32, expected_crc32))
                status = (msg, i + 1, self._exponential_time(i))
        return status

    def _exponential_time(self, i):
        if i == 0:
            next_sleep = 0
        else:
            next_sleep = 0.05 * (2 ** i)
        return next_sleep
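Two details worth calling out: the backoff schedule grows from zero to roughly 25 seconds across the 10 attempts, and the ``& 0xffffffff`` mask normalizes ``binascii.crc32`` (which can return negative numbers on Python 2) to the unsigned value carried in the ``x-amz-crc32`` header. A standalone sketch:

from binascii import crc32

# Backoff schedule for NumberRetries = 10:
# 0, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6 seconds.
for i in range(10):
    print i, (0.05 * (2 ** i) if i else 0)

# crc32 can be negative on Python 2; masking matches the header's range.
print crc32('hello') & 0xffffffff   # 907060870
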
6 changes: 3 additions & 3 deletions boto/ec2/connection.py
@@ -2208,9 +2208,9 @@ def import_key_pair(self, key_name, public_key_material):
        it to AWS.

        :rtype: :class:`boto.ec2.keypair.KeyPair`
-        :return: The newly created :class:`boto.ec2.keypair.KeyPair`.
-                 The material attribute of the new KeyPair object
-                 will contain the the unencrypted PEM encoded RSA private key.
+        :return: A :class:`boto.ec2.keypair.KeyPair` object representing
+                 the newly imported key pair. This object will contain only
+                 the key name and the fingerprint.
        """
        public_key_material = base64.b64encode(public_key_material)
        params = {'KeyName': key_name,
16 changes: 16 additions & 0 deletions boto/gs/resumable_upload_handler.py
@@ -161,6 +161,22 @@ def get_tracker_uri(self):
        """
        return self.tracker_uri

    def get_upload_id(self):
        """
        Returns the upload ID for the resumable upload, or None if the upload
        has not yet started.
        """
        # We extract the upload_id from the tracker uri. We could retrieve the
        # upload_id from the headers in the response but this only works for
        # the case where we get the tracker uri from the service. In the case
        # where we get the tracker from the tracking file we need to do this
        # logic anyway.
        delim = '?upload_id='
        if self.tracker_uri and delim in self.tracker_uri:
            return self.tracker_uri[self.tracker_uri.index(delim) + len(delim):]
        else:
            return None

    def _remove_tracker_file(self):
        if (self.tracker_file_name and
                os.path.exists(self.tracker_file_name)):
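The method is just string slicing on the tracker URI; with a made-up tracker URI the extraction looks like:

# Hypothetical tracker URI, as the service or tracking file would supply it.
tracker_uri = ('https://storage.googleapis.com/bucket/obj'
               '?upload_id=AEnB2UoTkrZX')
delim = '?upload_id='
print tracker_uri[tracker_uri.index(delim) + len(delim):]
# -> AEnB2UoTkrZX
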
9 changes: 7 additions & 2 deletions boto/provider.py
@@ -117,7 +117,8 @@ class Provider(object):
                                     'metadata-directive',
        RESUMABLE_UPLOAD_HEADER_KEY: None,
        SECURITY_TOKEN_HEADER_KEY: AWS_HEADER_PREFIX + 'security-token',
-        SERVER_SIDE_ENCRYPTION_KEY: AWS_HEADER_PREFIX + 'server-side-encryption',
+        SERVER_SIDE_ENCRYPTION_KEY: AWS_HEADER_PREFIX +
+                                    'server-side-encryption',
        VERSION_ID_HEADER_KEY: AWS_HEADER_PREFIX + 'version-id',
        STORAGE_CLASS_HEADER_KEY: AWS_HEADER_PREFIX + 'storage-class',
        MFA_HEADER_KEY: AWS_HEADER_PREFIX + 'mfa',
@@ -166,6 +167,7 @@ class Provider(object):
    def __init__(self, name, access_key=None, secret_key=None,
                 security_token=None):
        self.host = None
+        self.port = None
        self.access_key = access_key
        self.secret_key = secret_key
        self.security_token = security_token
@@ -176,10 +178,13 @@ def __init__(self, name, access_key=None, secret_key=None,
        self.get_credentials(access_key, secret_key)
        self.configure_headers()
        self.configure_errors()
-        # allow config file to override default host
+        # Allow config file to override default host and port.
        host_opt_name = '%s_host' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', host_opt_name):
            self.host = config.get('Credentials', host_opt_name)
+        port_opt_name = '%s_port' % self.HostKeyMap[self.name]
+        if config.has_option('Credentials', port_opt_name):
+            self.port = config.getint('Credentials', port_opt_name)

    def get_access_key(self):
        if self._credentials_need_refresh():
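Since ``HostKeyMap`` maps the provider name to its option prefix (e.g. ``s3`` for AWS, ``gs`` for Google Storage), the new port override sits next to the existing host override in the ``[Credentials]`` section of the boto config file. A hypothetical ~/.boto pointing a connection at a local S3-compatible endpoint:

[Credentials]
s3_host = localhost
s3_port = 8080
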
4 changes: 3 additions & 1 deletion boto/redshift/__init__.py
@@ -39,6 +39,9 @@ def regions():
        RegionInfo(name='us-west-2',
                   endpoint='redshift.us-west-2.amazonaws.com',
                   connection_cls=cls),
+        RegionInfo(name='eu-west-1',
+                   endpoint='redshift.eu-west-1.amazonaws.com',
+                   connection_cls=cls),
    ]


@@ -47,4 +50,3 @@ def connect_to_region(region_name, **kw_params):
        if region.name == region_name:
            return region.connect(**kw_params)
    return None

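With the endpoint registered, European clusters are reachable through the usual ``connect_to_region`` helper; a sketch (credentials assumed to be configured elsewhere):

from boto.redshift import connect_to_region

conn = connect_to_region('eu-west-1')
print conn.host   # redshift.eu-west-1.amazonaws.com
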
1 change: 1 addition & 0 deletions boto/s3/key.py
@@ -1419,6 +1419,7 @@ def _get_file_internal(self, fp, headers=None, cb=None, num_cb=10,
                if e.errno == errno.ENOSPC:
                    raise StorageDataError('Out of space for destination file '
                                           '%s' % fp.name)
+                raise
            if cb and (cb_count <= 1 or i > 0) and data_len > 0:
                cb(data_len, cb_size)
            for alg in digesters:
4 changes: 2 additions & 2 deletions boto/s3/resumable_download_handler.py
@@ -263,9 +263,9 @@ def get_file(self, key, fp, headers, cb=None, num_cb=10, torrent=False,
            headers = {}

        # Use num-retries from constructor if one was provided; else check
-        # for a value specified in the boto config file; else default to 5.
+        # for a value specified in the boto config file; else default to 6.
        if self.num_retries is None:
-            self.num_retries = config.getint('Boto', 'num_retries', 5)
+            self.num_retries = config.getint('Boto', 'num_retries', 6)
        progress_less_iterations = 0

        while True:  # Retry as long as we're making progress.
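The precedence chain the updated comment describes, in miniature: an explicit constructor argument wins, then the ``[Boto] num_retries`` config option, then the new default of 6 (matching ``num_retries`` elsewhere in boto):

from boto import config
from boto.s3.resumable_download_handler import ResumableDownloadHandler

# Explicit argument takes precedence over config and default.
handler = ResumableDownloadHandler(num_retries=3)

# Otherwise get_file() falls back to the config option, then to 6.
print config.getint('Boto', 'num_retries', 6)
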
27 changes: 18 additions & 9 deletions boto/storage_uri.py
@@ -101,15 +101,7 @@ def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        @return: A connection to storage service provider of the given URI.
        """
        connection_args = dict(self.connection_args or ())
-        # Use OrdinaryCallingFormat instead of boto-default
-        # SubdomainCallingFormat because the latter changes the hostname
-        # that's checked during cert validation for HTTPS connections,
-        # which will fail cert validation (when cert validation is enabled).
-        # Note: the following import can't be moved up to the start of
-        # this file else it causes a config import failure when run from
-        # the resumable upload/download tests.
-        from boto.s3.connection import OrdinaryCallingFormat
-        connection_args['calling_format'] = OrdinaryCallingFormat()
-
        if (hasattr(self, 'suppress_consec_slashes') and
                'suppress_consec_slashes' not in connection_args):
            connection_args['suppress_consec_slashes'] = (
@@ -126,6 +118,23 @@ def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
                self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'gs':
            from boto.gs.connection import GSConnection
+            # Use OrdinaryCallingFormat instead of boto-default
+            # SubdomainCallingFormat because the latter changes the hostname
+            # that's checked during cert validation for HTTPS connections,
+            # which will fail cert validation (when cert validation is
+            # enabled).
+            #
+            # The same is not true for S3's HTTPS certificates. In fact,
+            # we don't want to do this for S3 because S3 requires the
+            # subdomain to match the location of the bucket. If the proper
+            # subdomain is not used, the server will return a 301 redirect
+            # with no Location header.
+            #
+            # Note: the following import can't be moved up to the
+            # start of this file else it causes a config import failure when
+            # run from the resumable upload/download tests.
+            from boto.s3.connection import OrdinaryCallingFormat
+            connection_args['calling_format'] = OrdinaryCallingFormat()
            self.connection = GSConnection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
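The net effect, sketched below: gs:// URIs now connect with path-style request URLs (the hostname stays the service endpoint, keeping HTTPS cert validation happy), while s3:// URIs keep boto's default subdomain style, which S3 needs to route bucket requests without a bare 301 redirect. Bucket and key names here are hypothetical:

import boto

# gs:// -> OrdinaryCallingFormat, e.g. https://storage.googleapis.com/mybucket/mykey
gs_uri = boto.storage_uri('gs://mybucket/mykey')

# s3:// -> default SubdomainCallingFormat, e.g. https://mybucket.s3.amazonaws.com/mykey
s3_uri = boto.storage_uri('s3://mybucket/mykey')
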
