Skip to content
This repository has been archived by the owner on May 10, 2024. It is now read-only.

Commit

Permalink
Merge branch 's3-encoding-type' into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
toastdriven committed Dec 19, 2013
2 parents dcf3cbd + 1c5b63a commit 6b2d967
Show file tree
Hide file tree
Showing 4 changed files with 171 additions and 26 deletions.
106 changes: 93 additions & 13 deletions boto/s3/bucket.py
Expand Up @@ -211,7 +211,8 @@ def _get_key_internal(self, key_name, headers, query_args_l):
raise self.connection.provider.storage_response_error(
response.status, response.reason, '')

def list(self, prefix='', delimiter='', marker='', headers=None,
         encoding_type=None):
    """
    List key objects within a bucket.  This returns an instance of a
    BucketListResultSet that automatically handles all of the result
    paging, etc. from S3.  You just need to keep iterating until
    there are no more results.

    :type prefix: string
    :param prefix: Only return keys that begin with this prefix.

    :type delimiter: string
    :param delimiter: Keys that contain the delimiter (after any prefix)
        are rolled up into a single result element.

    :type marker: string
    :param marker: The "marker" of where you are in the result set

    :type headers: dict
    :param headers: Additional HTTP headers to send with the requests.

    :type encoding_type: string
    :param encoding_type: Requests Amazon S3 to encode the response and
        specifies the encoding method to use.

        An object key can contain any Unicode character; however, XML 1.0
        parser cannot parse some characters, such as characters with an
        ASCII value from 0 to 10. For characters that are not supported in
        XML 1.0, you can add this parameter to request that Amazon S3
        encode the keys in the response.

        Valid options: ``url``

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: an instance of a BucketListResultSet that handles paging, etc
    """
    # The result set stores the parameters and re-issues the paged
    # GET Bucket requests lazily on iteration.
    return BucketListResultSet(self, prefix, delimiter, marker, headers,
                               encoding_type=encoding_type)

def list_versions(self, prefix='', delimiter='', key_marker='',
                  version_id_marker='', headers=None, encoding_type=None):
    """
    List version objects within a bucket.  This returns an
    instance of a VersionedBucketListResultSet that automatically
    handles all of the result paging, etc. from S3.  You just need
    to keep iterating until there are no more results.

    :type prefix: string
    :param prefix: Only return keys that begin with this prefix.

    :type delimiter: string
    :param delimiter: Keys that contain the delimiter (after any prefix)
        are rolled up into a single result element.

    :type key_marker: string
    :param key_marker: The "marker" of where you are in the result set

    :type version_id_marker: string
    :param version_id_marker: The version id of the key to start
        listing from.

    :type headers: dict
    :param headers: Additional HTTP headers to send with the requests.

    :type encoding_type: string
    :param encoding_type: Requests Amazon S3 to encode the response and
        specifies the encoding method to use.

        An object key can contain any Unicode character; however, XML 1.0
        parser cannot parse some characters, such as characters with an
        ASCII value from 0 to 10. For characters that are not supported in
        XML 1.0, you can add this parameter to request that Amazon S3
        encode the keys in the response.

        Valid options: ``url``

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: an instance of a BucketListResultSet that handles paging, etc
    """
    return VersionedBucketListResultSet(self, prefix, delimiter,
                                        key_marker, version_id_marker,
                                        headers,
                                        encoding_type=encoding_type)

def list_multipart_uploads(self, key_marker='',
                           upload_id_marker='',
                           headers=None, encoding_type=None):
    """
    List multipart upload objects within a bucket.  This returns an
    instance of a MultiPartUploadListResultSet that automatically
    handles all of the result paging, etc. from S3.  You just need
    to keep iterating until there are no more results.

    :type key_marker: string
    :param key_marker: The "marker" of where you are in the result set

    :type upload_id_marker: string
    :param upload_id_marker: The upload identifier

    :type headers: dict
    :param headers: Additional HTTP headers to send with the requests.

    :type encoding_type: string
    :param encoding_type: Requests Amazon S3 to encode the response and
        specifies the encoding method to use.

        An object key can contain any Unicode character; however, XML 1.0
        parser cannot parse some characters, such as characters with an
        ASCII value from 0 to 10. For characters that are not supported in
        XML 1.0, you can add this parameter to request that Amazon S3
        encode the keys in the response.

        Valid options: ``url``

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: an instance of a BucketListResultSet that handles paging, etc
    """
    return MultiPartUploadListResultSet(self, key_marker,
                                        upload_id_marker,
                                        headers,
                                        encoding_type=encoding_type)

def _get_all_query_args(self, params, initial_query_string=''):
pairs = []
Expand Down Expand Up @@ -381,12 +424,25 @@ def get_all_keys(self, headers=None, **params):
element in the CommonPrefixes collection. These rolled-up
keys are not returned elsewhere in the response.
:param encoding_type: Requests Amazon S3 to encode the response and
specifies the encoding method to use.
An object key can contain any Unicode character; however, XML 1.0
parser cannot parse some characters, such as characters with an
ASCII value from 0 to 10. For characters that are not supported in
XML 1.0, you can add this parameter to request that Amazon S3
encode the keys in the response.
Valid options: ``url``
:type encoding_type: string
:rtype: ResultSet
:return: The result from S3 listing the keys requested
"""
self.validate_kwarg_names(params, ['maxkeys', 'max_keys', 'prefix',
'marker', 'delimiter'])
'marker', 'delimiter',
'encoding_type'])
return self._get_all([('Contents', self.key_class),
('CommonPrefixes', Prefix)],
'', headers, **params)
Expand Down Expand Up @@ -421,6 +477,18 @@ def get_all_versions(self, headers=None, **params):
element in the CommonPrefixes collection. These rolled-up
keys are not returned elsewhere in the response.
:param encoding_type: Requests Amazon S3 to encode the response and
specifies the encoding method to use.
An object key can contain any Unicode character; however, XML 1.0
parser cannot parse some characters, such as characters with an
ASCII value from 0 to 10. For characters that are not supported in
XML 1.0, you can add this parameter to request that Amazon S3
encode the keys in the response.
Valid options: ``url``
:type encoding_type: string
:rtype: ResultSet
:return: The result from S3 listing the keys requested
"""
Expand All @@ -440,7 +508,7 @@ def validate_get_all_versions_params(self, params):
"""
self.validate_kwarg_names(
params, ['maxkeys', 'max_keys', 'prefix', 'key_marker',
'version_id_marker', 'delimiter'])
'version_id_marker', 'delimiter', 'encoding_type'])

def get_all_multipart_uploads(self, headers=None, **params):
"""
Expand Down Expand Up @@ -476,12 +544,24 @@ def get_all_multipart_uploads(self, headers=None, **params):
list only if they have an upload ID lexicographically
greater than the specified upload_id_marker.
:param encoding_type: Requests Amazon S3 to encode the response and
specifies the encoding method to use.
An object key can contain any Unicode character; however, XML 1.0
parser cannot parse some characters, such as characters with an
ASCII value from 0 to 10. For characters that are not supported in
XML 1.0, you can add this parameter to request that Amazon S3
encode the keys in the response.
Valid options: ``url``
:type encoding_type: string
:rtype: ResultSet
:return: The result from S3 listing the uploads requested
"""
self.validate_kwarg_names(params, ['max_uploads', 'key_marker',
'upload_id_marker'])
'upload_id_marker', 'encoding_type'])
return self._get_all([('Upload', MultiPartUpload),
('CommonPrefixes', Prefix)],
'uploads', headers, **params)
Expand Down
35 changes: 23 additions & 12 deletions boto/s3/bucketlistresultset.py
Expand Up @@ -19,15 +19,17 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

def bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None):
def bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,
encoding_type=None):
"""
A generator function for listing keys in a bucket.
"""
more_results = True
k = None
while more_results:
rs = bucket.get_all_keys(prefix=prefix, marker=marker,
delimiter=delimiter, headers=headers)
delimiter=delimiter, headers=headers,
encoding_type=encoding_type)
for k in rs:
yield k
if k:
Expand All @@ -43,20 +45,24 @@ class BucketListResultSet(object):
keys in a reasonably efficient manner.
"""

def __init__(self, bucket=None, prefix='', delimiter='', marker='',
             headers=None, encoding_type=None):
    """
    Store the listing parameters; they are forwarded verbatim to
    ``bucket_lister`` each time this result set is iterated.
    """
    self.bucket = bucket
    self.prefix = prefix
    self.delimiter = delimiter
    self.marker = marker
    self.headers = headers
    # 'url' requests S3 to URL-encode keys the XML parser can't represent.
    self.encoding_type = encoding_type

def __iter__(self):
    # Delegate to the generator so every iteration starts a fresh,
    # lazily paged listing request against S3.
    return bucket_lister(self.bucket, prefix=self.prefix,
                         delimiter=self.delimiter, marker=self.marker,
                         headers=self.headers,
                         encoding_type=self.encoding_type)

def versioned_bucket_lister(bucket, prefix='', delimiter='',
key_marker='', version_id_marker='', headers=None):
key_marker='', version_id_marker='', headers=None,
encoding_type=None):
"""
A generator function for listing versions in a bucket.
"""
Expand All @@ -66,7 +72,7 @@ def versioned_bucket_lister(bucket, prefix='', delimiter='',
rs = bucket.get_all_versions(prefix=prefix, key_marker=key_marker,
version_id_marker=version_id_marker,
delimiter=delimiter, headers=headers,
max_keys=999)
max_keys=999, encoding_type=encoding_type)
for k in rs:
yield k
key_marker = rs.next_key_marker
Expand All @@ -83,24 +89,26 @@ class VersionedBucketListResultSet(object):
"""

def __init__(self, bucket=None, prefix='', delimiter='', key_marker='',
             version_id_marker='', headers=None, encoding_type=None):
    """
    Store the listing parameters; they are forwarded verbatim to
    ``versioned_bucket_lister`` each time this result set is iterated.
    """
    self.bucket = bucket
    self.prefix = prefix
    self.delimiter = delimiter
    self.key_marker = key_marker
    self.version_id_marker = version_id_marker
    self.headers = headers
    # 'url' requests S3 to URL-encode keys the XML parser can't represent.
    self.encoding_type = encoding_type

def __iter__(self):
    # Delegate to the generator so every iteration starts a fresh,
    # lazily paged version-listing request against S3.
    return versioned_bucket_lister(self.bucket, prefix=self.prefix,
                                   delimiter=self.delimiter,
                                   key_marker=self.key_marker,
                                   version_id_marker=self.version_id_marker,
                                   headers=self.headers,
                                   encoding_type=self.encoding_type)

def multipart_upload_lister(bucket, key_marker='',
upload_id_marker='',
headers=None):
headers=None, encoding_type=None):
"""
A generator function for listing multipart uploads in a bucket.
"""
Expand All @@ -109,7 +117,8 @@ def multipart_upload_lister(bucket, key_marker='',
while more_results:
rs = bucket.get_all_multipart_uploads(key_marker=key_marker,
upload_id_marker=upload_id_marker,
headers=headers)
headers=headers,
encoding_type=encoding_type)
for k in rs:
yield k
key_marker = rs.next_key_marker
Expand All @@ -126,14 +135,16 @@ class MultiPartUploadListResultSet(object):
keys in a reasonably efficient manner.
"""
def __init__(self, bucket=None, key_marker='',
             upload_id_marker='', headers=None, encoding_type=None):
    """
    Store the listing parameters; they are forwarded verbatim to
    ``multipart_upload_lister`` each time this result set is iterated.
    """
    self.bucket = bucket
    self.key_marker = key_marker
    self.upload_id_marker = upload_id_marker
    self.headers = headers
    # 'url' requests S3 to URL-encode keys the XML parser can't represent.
    self.encoding_type = encoding_type

def __iter__(self):
    # Delegate to the generator so every iteration starts a fresh,
    # lazily paged multipart-upload listing against S3.
    return multipart_upload_lister(self.bucket,
                                   key_marker=self.key_marker,
                                   upload_id_marker=self.upload_id_marker,
                                   headers=self.headers,
                                   encoding_type=self.encoding_type)
5 changes: 4 additions & 1 deletion boto/s3/multipart.py
Expand Up @@ -199,7 +199,8 @@ def endElement(self, name, value, connection):
else:
setattr(self, name, value)

def get_all_parts(self, max_parts=None, part_number_marker=None):
def get_all_parts(self, max_parts=None, part_number_marker=None,
encoding_type=None):
"""
Return the uploaded parts of this MultiPart Upload. This is
a lower-level method that requires you to manually page through
Expand All @@ -213,6 +214,8 @@ def get_all_parts(self, max_parts=None, part_number_marker=None):
query_args += '&max-parts=%d' % max_parts
if part_number_marker:
query_args += '&part-number-marker=%s' % part_number_marker
if encoding_type:
query_args += '&encoding-type=%s' % encoding_type
response = self.bucket.connection.make_request('GET', self.bucket.name,
self.key_name,
query_args=query_args)
Expand Down
51 changes: 51 additions & 0 deletions tests/unit/s3/test_bucket.py
Expand Up @@ -6,6 +6,10 @@

from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.s3.deletemarker import DeleteMarker
from boto.s3.key import Key
from boto.s3.multipart import MultiPartUpload
from boto.s3.prefix import Prefix


class TestS3Bucket(AWSMockServiceTestCase):
Expand Down Expand Up @@ -125,3 +129,50 @@ def test_bucket_copy_key_no_validate(self, mock_get_all_keys):
# Will throw because of empty response.
pass
self.assertFalse(mock_get_all_keys.called)

@patch.object(Bucket, '_get_all')
def test_bucket_encoding(self, mock_get_all):
    """Each get_all_* variant must forward ``encoding_type`` to _get_all."""
    self.set_http_response(status_code=200)
    bucket = self.service_connection.get_bucket('mybucket')

    # Baseline: without the parameter, _get_all receives positional
    # arguments only and no encoding_type keyword.
    mock_get_all.reset_mock()
    bucket.get_all_keys()
    mock_get_all.assert_called_with(
        [('Contents', Key), ('CommonPrefixes', Prefix)], '', None)

    # Each listing variant, with its expected element map and query arg.
    variants = [
        (bucket.get_all_keys,
         [('Contents', Key), ('CommonPrefixes', Prefix)],
         ''),
        (bucket.get_all_versions,
         [('Version', Key), ('CommonPrefixes', Prefix),
          ('DeleteMarker', DeleteMarker)],
         'versions'),
        (bucket.get_all_multipart_uploads,
         [('Upload', MultiPartUpload), ('CommonPrefixes', Prefix)],
         'uploads'),
    ]
    for method, element_map, query_arg in variants:
        mock_get_all.reset_mock()
        method(encoding_type='url')
        mock_get_all.assert_called_with(element_map, query_arg, None,
                                        encoding_type='url')

0 comments on commit 6b2d967

Please sign in to comment.