Permalink
Browse files

Merge branch 'release-2.9.0'

* release-2.9.0: (158 commits)
  Bump version to 2.9.0
  Added underlying DynamoDB v2 support.
  Add redshift to setup.py/docs index
  Updated requests to something more modern.
  Only use 2 metadata service calls to get credentials
  Fix #1146: return response from custom url opener
  Fixed missing import.
  Add metadata_service_num_attempts config option
  Added cleanup for the snapshots created.
  Added support for redshift.
  Let total attempts be 1 + num_retries
  Add more diagnostics to debug logs
  Change GS calls to make_request to always convert to utf-8 bytes.
  Allow kwargs to be passed through to uploader
  Remove whitespace, fix long line lengths
  Improve VPC and VPN support
  Added sleeps to allow amazon time to propagate
  Added error handling for out of space during downloads
  Initial integration tests for idempotent subscribe
  Removed dead code from resumable upload handler
  ...
  • Loading branch information...
2 parents b5852b0 + 699d861 commit 89f4947000587e12042e5b35c4557871b21137b9 @jamesls jamesls committed Apr 18, 2013
Showing with 13,362 additions and 1,798 deletions.
  1. +2 −1 .gitignore
  2. +1 −1 README.rst
  3. +75 −0 bin/dynamodb_dump
  4. +109 −0 bin/dynamodb_load
  5. +1 −1 bin/mturk
  6. +1 −9 bin/sdbadmin
  7. +55 −14 boto/__init__.py
  8. +3 −3 boto/auth.py
  9. +1 −1 boto/beanstalk/exception.py
  10. +303 −317 boto/beanstalk/layer1.py
  11. +1 −1 boto/beanstalk/response.py
  12. +1,564 −0 boto/cacerts/cacerts.txt
  13. +3 −0 boto/cloudsearch/__init__.py
  14. +20 −14 boto/cloudsearch/document.py
  15. +1 −1 boto/cloudsearch/layer1.py
  16. +1 −1 boto/cloudsearch/search.py
  17. +8 −3 boto/connection.py
  18. +1 −1 boto/core/credentials.py
  19. +1 −1 boto/datapipeline/layer1.py
  20. +3 −0 boto/dynamodb/__init__.py
  21. +6 −0 boto/dynamodb/item.py
  22. +2 −2 boto/dynamodb/layer2.py
  23. +1 −1 boto/dynamodb/table.py
  24. +63 −0 boto/dynamodb2/__init__.py
  25. +50 −0 boto/dynamodb2/exceptions.py
  26. +1,407 −0 boto/dynamodb2/layer1.py
  27. +71 −0 boto/ec2/attributes.py
  28. +13 −12 boto/ec2/blockdevicemapping.py
  29. +79 −38 boto/ec2/connection.py
  30. +37 −24 boto/ec2/image.py
  31. +4 −0 boto/ec2/instance.py
  32. +1 −1 boto/elasticache/__init__.py
  33. +4 −3 boto/elasticache/layer1.py
  34. +17 −16 boto/elastictranscoder/layer1.py
  35. +1 −1 boto/emr/connection.py
  36. +2 −2 boto/exception.py
  37. +36 −5 boto/file/key.py
  38. +67 −41 boto/fps/connection.py
  39. +18 −3 boto/glacier/concurrent.py
  40. +9 −2 boto/glacier/vault.py
  41. +10 −6 boto/gs/acl.py
  42. +109 −31 boto/gs/bucket.py
  43. +5 −3 boto/gs/connection.py
  44. +232 −35 boto/gs/key.py
  45. +56 −33 boto/gs/resumable_upload_handler.py
  46. +12 −0 boto/handler.py
  47. +2 −0 boto/https_connection.py
  48. 0 boto/opsworks/__init__.py
  49. +30 −0 boto/opsworks/exceptions.py
  50. +1,457 −0 boto/opsworks/layer1.py
  51. +10 −4 boto/provider.py
  52. +25 −16 boto/rds/dbsecuritygroup.py
  53. +50 −0 boto/redshift/__init__.py
  54. +182 −0 boto/redshift/exceptions.py
  55. +2,076 −0 boto/redshift/layer1.py
  56. +1 −1 boto/route53/connection.py
  57. +3 −0 boto/s3/__init__.py
  58. +39 −15 boto/s3/bucket.py
  59. +55 −2 boto/s3/connection.py
  60. +143 −81 boto/s3/key.py
  61. +6 −6 boto/s3/keyfile.py
  62. +16 −11 boto/s3/resumable_download_handler.py
  63. +111 −55 boto/s3/website.py
  64. +15 −7 boto/sns/connection.py
  65. +2 −6 boto/sqs/connection.py
  66. +2 −2 boto/sqs/queue.py
  67. +127 −73 boto/storage_uri.py
  68. +15 −6 boto/swf/__init__.py
  69. +2 −2 boto/swf/layer2.py
  70. +34 −15 boto/utils.py
  71. +38 −5 boto/vpc/__init__.py
  72. +30 −2 boto/vpc/vpc.py
  73. +147 −6 boto/vpc/vpnconnection.py
  74. +5 −6 docs/source/autoscale_tut.rst
  75. +281 −7 docs/source/boto_config_tut.rst
  76. +81 −60 docs/source/cloudsearch_tut.rst
  77. +3 −3 docs/source/cloudwatch_tut.rst
  78. +340 −339 docs/source/dynamodb_tut.rst
  79. +103 −15 docs/source/ec2_tut.rst
  80. +18 −39 docs/source/elb_tut.rst
  81. +9 −10 docs/source/emr_tut.rst
  82. +177 −0 docs/source/getting_started.rst
  83. +18 −1 docs/source/index.rst
  84. +108 −0 docs/source/rds_tut.rst
  85. +1 −1 docs/source/ref/cloudsearch.rst
  86. +26 −0 docs/source/ref/dynamodb2.rst
  87. +1 −0 docs/source/ref/index.rst
  88. +26 −0 docs/source/ref/redshift.rst
  89. +198 −132 docs/source/s3_tut.rst
  90. +6 −5 docs/source/ses_tut.rst
  91. +5 −2 docs/source/simpledb_tut.rst
  92. +4 −4 docs/source/sqs_tut.rst
  93. +10 −1 docs/source/vpc_tut.rst
  94. +5 −2 requirements.txt
  95. +4 −2 setup.py
  96. 0 tests/integration/dynamodb2/__init__.py
  97. +40 −0 tests/integration/dynamodb2/test_cert_verification.py
  98. +244 −0 tests/integration/dynamodb2/test_layer1.py
  99. +47 −55 tests/integration/ec2/elb/test_connection.py
  100. +5 −0 tests/integration/elastictranscoder/test_layer1.py
  101. +1 −1 tests/integration/gs/test_basic.py
  102. +19 −19 tests/integration/gs/test_generation_conditionals.py
  103. +1 −5 tests/integration/gs/test_resumable_downloads.py
  104. +0 −2 tests/integration/gs/test_resumable_uploads.py
  105. +101 −57 tests/integration/gs/test_storage_uri.py
  106. +4 −5 tests/integration/gs/test_versioning.py
  107. +22 −0 tests/integration/gs/util.py
  108. 0 tests/integration/opsworks/__init__.py
  109. +40 −0 tests/integration/opsworks/test_layer1.py
  110. 0 tests/integration/redshift/__init__.py
  111. +35 −0 tests/integration/redshift/test_cert_verification.py
  112. +134 −0 tests/integration/redshift/test_layer1.py
  113. +14 −17 tests/integration/s3/mock_storage_service.py
  114. +2 −1 tests/integration/s3/test_connection.py
  115. +101 −0 tests/integration/sns/test_sns_sqs_subscription.py
  116. +50 −26 tests/integration/sqs/test_connection.py
  117. +4 −1 tests/test.py
  118. +3 −12 tests/unit/beanstalk/test_layer1.py
  119. +1 −0 tests/unit/cloudsearch/__init__.py
  120. +241 −0 tests/unit/cloudsearch/test_connection.py
  121. +324 −0 tests/unit/cloudsearch/test_document.py
  122. +325 −0 tests/unit/cloudsearch/test_search.py
  123. +86 −0 tests/unit/ec2/test_connection.py
  124. +56 −0 tests/unit/glacier/test_concurrent.py
  125. +10 −0 tests/unit/glacier/test_vault.py
  126. +32 −21 tests/unit/provider/test_provider.py
  127. +48 −0 tests/unit/s3/test_bucket.py
  128. +3 −3 tests/unit/s3/test_keyfile.py
  129. +257 −0 tests/unit/s3/test_uri.py
  130. +71 −1 tests/unit/s3/test_website.py
  131. +206 −0 tests/unit/test_connection.py
  132. +78 −0 tests/unit/test_exception.py
  133. 0 tests/unit/utils/__init__.py
  134. +81 −0 tests/unit/utils/test_utils.py
  135. 0 tests/unit/vpc/__init__.py
  136. +40 −0 tests/unit/vpc/test_vpc.py
  137. +123 −0 tests/unit/vpc/test_vpnconnection.py
View
@@ -10,4 +10,5 @@ MANIFEST
.idea
.tox
.coverage
-*flymake.py
+*flymake.py
+venv
View
@@ -137,7 +137,7 @@ Getting Started with Boto
*************************
Your credentials can be passed into the methods that create
-connections. Alternatively, boto will check for the existance of the
+connections. Alternatively, boto will check for the existence of the
following environment variables to ascertain your credentials:
**AWS_ACCESS_KEY_ID** - Your AWS Access Key ID
View
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+import argparse
+import errno
+import os
+
+import boto
+from boto.compat import json
+
+
+DESCRIPTION = """Dump the contents of one or more DynamoDB tables to the local filesystem.
+
+Each table is dumped into two files:
+ - {table_name}.metadata stores the table's name, schema and provisioned
+ throughput.
+ - {table_name}.data stores the table's actual contents.
+
+Both files are created in the current directory. To write them somewhere else,
+use the --out-dir parameter (the target directory will be created if needed).
+"""
+
+
+def dump_table(table, out_dir):
+ metadata_file = os.path.join(out_dir, "%s.metadata" % table.name)
+ data_file = os.path.join(out_dir, "%s.data" % table.name)
+
+ with open(metadata_file, "w") as metadata_fd:
+ json.dump(
+ {
+ "name": table.name,
+ "schema": table.schema.dict,
+ "read_units": table.read_units,
+ "write_units": table.write_units,
+ },
+ metadata_fd
+ )
+
+ with open(data_file, "w") as data_fd:
+ for item in table.scan():
+ # JSON can't serialize sets -- convert those to lists.
+ data = {}
+ for k, v in item.iteritems():
+ if isinstance(v, (set, frozenset)):
+ data[k] = list(v)
+ else:
+ data[k] = v
+
+ data_fd.write(json.dumps(data))
+ data_fd.write("\n")
+
+
+def dynamodb_dump(tables, out_dir):
+ try:
+ os.makedirs(out_dir)
+ except OSError as e:
+ # We don't care if the dir already exists.
+ if e.errno != errno.EEXIST:
+ raise
+
+ conn = boto.connect_dynamodb()
+ for t in tables:
+ dump_table(conn.get_table(t), out_dir)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="dynamodb_dump",
+ description=DESCRIPTION
+ )
+ parser.add_argument("--out-dir", default=".")
+ parser.add_argument("tables", metavar="TABLES", nargs="+")
+
+ namespace = parser.parse_args()
+
+ dynamodb_dump(namespace.tables, namespace.out_dir)
View
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+
+import argparse
+import os
+
+import boto
+from boto.compat import json
+from boto.dynamodb.schema import Schema
+
+
+DESCRIPTION = """Load data into one or more DynamoDB tables.
+
+For each table, data is read from two files:
+ - {table_name}.metadata for the table's name, schema and provisioned
+ throughput (only required if creating the table).
+ - {table_name}.data for the table's actual contents.
+
+Both files are searched for in the current directory. To read them from
+somewhere else, use the --in-dir parameter.
+
+This program does not wipe the tables prior to loading data. However, any
+items present in the data files will overwrite the table's contents.
+"""
+
+
+def _json_iterload(fd):
+ """Lazily load newline-separated JSON objects from a file-like object."""
+ buffer = ""
+ eof = False
+ while not eof:
+ try:
+ # Add a line to the buffer
+ buffer += fd.next()
+ except StopIteration:
+ # We can't let that exception bubble up, otherwise the last
+ # object in the file will never be decoded.
+ eof = True
+ try:
+ # Try to decode a JSON object.
+ json_object = json.loads(buffer.strip())
+
+ # Success: clear the buffer (everything was decoded).
+ buffer = ""
+ except ValueError:
+ if eof and buffer.strip():
+ # No more lines to load and the buffer contains something other
+ # than whitespace: the file is, in fact, malformed.
+ raise
+ # We couldn't decode a complete JSON object: load more lines.
+ continue
+
+ yield json_object
+
+
+def create_table(metadata_fd):
+ """Create a table from a metadata file-like object."""
+
+
+def load_table(table, in_fd):
+ """Load items into a table from a file-like object."""
+ for i in _json_iterload(in_fd):
+ # Convert lists back to sets.
+ data = {}
+ for k, v in i.iteritems():
+ if isinstance(v, list):
+ data[k] = set(v)
+ else:
+ data[k] = v
+ table.new_item(attrs=i).put()
+
+
+def dynamodb_load(tables, in_dir, create_tables):
+ conn = boto.connect_dynamodb()
+ for t in tables:
+ metadata_file = os.path.join(in_dir, "%s.metadata" % t)
+ data_file = os.path.join(in_dir, "%s.data" % t)
+ if create_tables:
+ with open(metadata_file) as meta_fd:
+ metadata = json.load(meta_fd)
+ table = conn.create_table(
+ name=t,
+ schema=Schema(metadata["schema"]),
+ read_units=metadata["read_units"],
+ write_units=metadata["write_units"],
+ )
+ table.refresh(wait_for_active=True)
+ else:
+ table = conn.get_table(t)
+
+ with open(data_file) as in_fd:
+ load_table(table, in_fd)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="dynamodb_load",
+ description=DESCRIPTION
+ )
+ parser.add_argument(
+ "--create-tables",
+ action="store_true",
+ help="Create the tables if they don't exist already (without this flag, attempts to load data into non-existing tables fail)."
+ )
+ parser.add_argument("--in-dir", default=".")
+ parser.add_argument("tables", metavar="TABLES", nargs="+")
+
+ namespace = parser.parse_args()
+
+ dynamodb_load(namespace.tables, namespace.in_dir, namespace.create_tables)
View
@@ -25,8 +25,8 @@ import os.path
import string
import inspect
import datetime, calendar
-import json
import boto.mturk.connection, boto.mturk.price, boto.mturk.question, boto.mturk.qualification
+from boto.compat import json
# --------------------------------------------------
# Globals
View
@@ -26,15 +26,7 @@ VERSION = "%prog version 1.0"
import boto
import time
from boto import sdb
-
-# Allow support for JSON
-try:
- import simplejson as json
-except:
- try:
- import json
- except:
- json = False
+from boto.compat import json
def choice_input(options, default=None, title=None):
"""
View
@@ -2,6 +2,7 @@
# Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# Copyright (c) 2010, Google, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -35,12 +36,20 @@
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.8.0'
+__version__ = '2.9.0'
Version = __version__ # for backware compatibility
UserAgent = 'Boto/%s (%s)' % (__version__, sys.platform)
config = Config()
+# Regex to disallow buckets violating charset or not [3..255] chars total.
+BUCKET_NAME_RE = re.compile(r'^[a-z0-9][a-z0-9\._-]{1,253}[a-z0-9]$')
+# Regex to disallow buckets with individual DNS labels longer than 63.
+TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64}')
+GENERATION_RE = re.compile(r'(?P<versionless_uri_str>.+)'
+ r'#(?P<generation>[0-9]+)$')
+VERSION_RE = re.compile('(?P<versionless_uri_str>.+)#(?P<version_id>.+)$')
+
def init_logging():
for file in BotoConfigLocations:
@@ -655,9 +664,19 @@ def connect_elastictranscoder(aws_access_key_id=None,
**kwargs)
+def connect_opsworks(aws_access_key_id=None,
+ aws_secret_access_key=None,
+ **kwargs):
+ from boto.opsworks.layer1 import OpsWorksConnection
+ return OpsWorksConnection(
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ **kwargs)
+
+
def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
bucket_storage_uri_class=BucketStorageUri,
- suppress_consec_slashes=True):
+ suppress_consec_slashes=True, is_latest=False):
"""
Instantiate a StorageUri from a URI string.
@@ -673,6 +692,9 @@ def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
:param bucket_storage_uri_class: Allows mocking for unit tests.
:param suppress_consec_slashes: If provided, controls whether
consecutive slashes will be suppressed in key paths.
+ :type is_latest: bool
+ :param is_latest: whether this versioned object represents the
+ current version.
We allow validate to be disabled to allow caller
to implement bucket-level wildcarding (outside the boto library;
@@ -684,14 +706,17 @@ def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
``uri_str`` must be one of the following formats:
* gs://bucket/name
+ * gs://bucket/name#ver
* s3://bucket/name
* gs://bucket
* s3://bucket
* filename (which could be a Unix path like /a/b/c or a Windows path like
C:\a\b\c)
- The last example uses the default scheme ('file', unless overridden)
+ The last example uses the default scheme ('file', unless overridden).
"""
+ version_id = None
+ generation = None
# Manually parse URI components instead of using urlparse.urlparse because
# what we're calling URIs don't really fit the standard syntax for URIs
@@ -708,7 +733,8 @@ def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
if not (platform.system().lower().startswith('windows')
and colon_pos == 1
and drive_char >= 'a' and drive_char <= 'z'):
- raise InvalidUriError('"%s" contains ":" instead of "://"' % uri_str)
+ raise InvalidUriError('"%s" contains ":" instead of "://"' %
+ uri_str)
scheme = default_scheme.lower()
path = uri_str
else:
@@ -727,23 +753,38 @@ def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
else:
path_parts = path.split('/', 1)
bucket_name = path_parts[0]
- if (validate and bucket_name and
- # Disallow buckets violating charset or not [3..255] chars total.
- (not re.match('^[a-z0-9][a-z0-9\._-]{1,253}[a-z0-9]$', bucket_name)
- # Disallow buckets with individual DNS labels longer than 63.
- or re.search('[-_a-z0-9]{64}', bucket_name))):
- raise InvalidUriError('Invalid bucket name in URI "%s"' % uri_str)
- # If enabled, ensure the bucket name is valid, to avoid possibly
- # confusing other parts of the code. (For example if we didn't
+ object_name = ''
+ # If validate enabled, ensure the bucket name is valid, to avoid
+ # possibly confusing other parts of the code. (For example if we didn't
# catch bucket names containing ':', when a user tried to connect to
# the server with that name they might get a confusing error about
# non-integer port numbers.)
- object_name = ''
+ if (validate and bucket_name and
+ (not BUCKET_NAME_RE.match(bucket_name)
+ or TOO_LONG_DNS_NAME_COMP.search(bucket_name))):
+ raise InvalidUriError('Invalid bucket name in URI "%s"' % uri_str)
+ if scheme == 'gs':
+ match = GENERATION_RE.search(path)
+ if match:
+ md = match.groupdict()
+ versionless_uri_str = md['versionless_uri_str']
+ path_parts = versionless_uri_str.split('/', 1)
+ generation = int(md['generation'])
+ elif scheme == 's3':
+ match = VERSION_RE.search(path)
+ if match:
+ md = match.groupdict()
+ versionless_uri_str = md['versionless_uri_str']
+ path_parts = versionless_uri_str.split('/', 1)
+ version_id = md['version_id']
+ else:
+ raise InvalidUriError('Unrecognized scheme "%s"' % scheme)
if len(path_parts) > 1:
object_name = path_parts[1]
return bucket_storage_uri_class(
scheme, bucket_name, object_name, debug,
- suppress_consec_slashes=suppress_consec_slashes)
+ suppress_consec_slashes=suppress_consec_slashes,
+ version_id=version_id, generation=generation, is_latest=is_latest)
def storage_uri_for_key(key):
View
@@ -164,9 +164,9 @@ def add_auth(self, http_request, **kwargs):
boto.log.debug('StringToSign:\n%s' % string_to_sign)
b64_hmac = self.sign_string(string_to_sign)
auth_hdr = self._provider.auth_header
- headers['Authorization'] = ("%s %s:%s" %
- (auth_hdr,
- self._provider.access_key, b64_hmac))
+ auth = ("%s %s:%s" % (auth_hdr, self._provider.access_key, b64_hmac))
+ boto.log.debug('Signature:\n%s' % auth)
+ headers['Authorization'] = auth
class HmacAuthV2Handler(AuthHandler, HmacKeys):
@@ -1,5 +1,5 @@
import sys
-import json
+from boto.compat import json
from boto.exception import BotoServerError
Oops, something went wrong.

0 comments on commit 89f4947

Please sign in to comment.