Allows removal of ACLs
fixes bug 1049017

As per the bug report, the -remove- form of deletion only works with
meta headers and not with x-container-read and x-container-write.

This patch by Yukihiro KAWADA detects the container ACLs and sends
them through to the backend empty, like the other metadata values.
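
For illustration only, a minimal sketch of the header translation this
enables (the 'container' server type and the sample header are assumptions
for the example, not values taken from the patch):

# Sketch: an X-Remove-Container-Read header is forwarded to the backend
# as an empty X-Container-Read, mirroring the existing handling of
# X-Remove-<type>-Meta-* headers.
st = 'container'                                # assumed server_type
x_remove = 'x-remove-%s-meta-' % st
x_remove_read = 'x-remove-%s-read' % st
x_remove_write = 'x-remove-%s-write' % st

src_headers = {'X-Remove-Container-Read': 'x'}  # hypothetical request
dst_headers = {}
dst_headers.update((k.lower().replace('-remove', '', 1), '')
                   for k in src_headers
                   if k.lower().startswith(x_remove) or
                   k.lower() in (x_remove_read, x_remove_write))
# dst_headers == {'x-container-read': ''}; the empty value tells the
# container server to drop the read ACL.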

patch2 extends the metadata helper in the ContainerController tests so
that the new functionality can be tested.

patch3 changes the k.lower().startswith() calls for read/write to
a single k.lower() - thanks David :)

patch4 fixes PEP8

patch5 fixes more than two hundred pep8 errors. Except one, where
the pep8 tool complains of over-indentation or under-indentation
depending on the position on the line. No idea how to fix that one.
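
A hypothetical illustration of that kind of conflict (not the actual
offending line from the patch): the continuation below sits deeper than a
plain four-space hanging indent but short of the opening parenthesis, so
the tool objects either way, and aligning with the parenthesis can push the
line past the length limit when the call starts far to the right:

# Hypothetical example only; the continuation indent is deliberately
# ambiguous between hanging and visual alignment.
def warn(template, value):
    return template % value

message = warn('Could not autocreate account %r',
        '/v1/AUTH_test')
print(message)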

patch6 fixes the remaining pep8 error - thanks Darrell

Change-Id: I36c2dd72b4636136c5ce7db414bf2b61501090ad
fifieldt committed Oct 10, 2012
1 parent 1ef17d8 commit 124e75b
Showing 2 changed files with 492 additions and 388 deletions.
50 changes: 30 additions & 20 deletions swift/proxy/controllers/base.py
@@ -107,15 +107,21 @@ def __init__(self, app):
         self.trans_id = '-'
 
     def transfer_headers(self, src_headers, dst_headers):
-        x_remove = 'x-remove-%s-meta-' % self.server_type.lower()
-        x_meta = 'x-%s-meta-' % self.server_type.lower()
+
+        st = self.server_type.lower()
+        x_remove = 'x-remove-%s-meta-' % st
+        x_remove_read = 'x-remove-%s-read' % st
+        x_remove_write = 'x-remove-%s-write' % st
+        x_meta = 'x-%s-meta-' % st
         dst_headers.update((k.lower().replace('-remove', '', 1), '')
                            for k in src_headers
-                           if k.lower().startswith(x_remove))
+                           if k.lower().startswith(x_remove) or
+                           k.lower() in (x_remove_read, x_remove_write))
+
         dst_headers.update((k.lower(), v)
                            for k, v in src_headers.iteritems()
                            if k.lower() in self.pass_through_headers or
-                               k.lower().startswith(x_meta))
+                           k.lower().startswith(x_meta))
 
     def error_increment(self, node):
         """
@@ -135,7 +141,8 @@ def error_occurred(self, node, msg):
         """
         self.error_increment(node)
         self.app.logger.error(_('%(msg)s %(ip)s:%(port)s'),
-            {'msg': msg, 'ip': node['ip'], 'port': node['port']})
+                              {'msg': msg, 'ip': node['ip'],
+                               'port': node['port']})
 
     def exception_occurred(self, node, typ, additional_info):
         """
@@ -220,7 +227,8 @@ def account_info(self, account, autocreate=False):
             try:
                 with ConnectionTimeout(self.app.conn_timeout):
                     conn = http_connect(node['ip'], node['port'],
-                        node['device'], partition, 'HEAD', path, headers)
+                                        node['device'], partition, 'HEAD',
+                                        path, headers)
                 with Timeout(self.app.node_timeout):
                     resp = conn.getresponse()
                     body = resp.read()
@@ -241,18 +249,19 @@ def account_info(self, account, autocreate=False):
                         result_code = -1
             except (Exception, Timeout):
                 self.exception_occurred(node, _('Account'),
-                    _('Trying to get account info for %s') % path)
+                                        _('Trying to get account info for %s')
+                                        % path)
         if result_code == HTTP_NOT_FOUND and autocreate:
             if len(account) > MAX_ACCOUNT_NAME_LENGTH:
                 return None, None, None
             headers = {'X-Timestamp': normalize_timestamp(time.time()),
                        'X-Trans-Id': self.trans_id,
                        'Connection': 'close'}
             resp = self.make_requests(Request.blank('/v1' + path),
-                self.app.account_ring, partition, 'PUT',
-                path, [headers] * len(nodes))
+                                      self.app.account_ring, partition, 'PUT',
+                                      path, [headers] * len(nodes))
             if not is_success(resp.status_int):
-                self.app.logger.warning('Could not autocreate account %r' % \
+                self.app.logger.warning('Could not autocreate account %r' %
                                         path)
                 return None, None, None
             result_code = HTTP_OK
@@ -262,8 +271,9 @@ def account_info(self, account, autocreate=False):
             else:
                 cache_timeout = self.app.recheck_account_existence * 0.1
             self.app.memcache.set(cache_key,
-                {'status': result_code, 'container_count': container_count},
-                timeout=cache_timeout)
+                                  {'status': result_code,
+                                   'container_count': container_count},
+                                  timeout=cache_timeout)
         if result_code == HTTP_OK:
             return partition, nodes, container_count
         return None, None, None
@@ -381,23 +391,23 @@ def _make_request(self, nodes, part, method, path, headers, query,
             try:
                 with ConnectionTimeout(self.app.conn_timeout):
                     conn = http_connect(node['ip'], node['port'],
-                        node['device'], part, method, path,
-                        headers=headers, query_string=query)
+                                        node['device'], part, method, path,
+                                        headers=headers, query_string=query)
                     conn.node = node
                 with Timeout(self.app.node_timeout):
                     resp = conn.getresponse()
                     if not is_informational(resp.status) and \
-                        not is_server_error(resp.status):
+                            not is_server_error(resp.status):
                         return resp.status, resp.reason, resp.read()
                     elif resp.status == HTTP_INSUFFICIENT_STORAGE:
                         self.error_limit(node)
             except (Exception, Timeout):
                 self.exception_occurred(node, self.server_type,
-                    _('Trying to %(method)s %(path)s') %
-                    {'method': method, 'path': path})
+                                        _('Trying to %(method)s %(path)s') %
+                                        {'method': method, 'path': path})
 
     def make_requests(self, req, ring, part, method, path, headers,
-            query_string=''):
+                      query_string=''):
         """
         Sends an HTTP request to multiple nodes and aggregates the results.
         It attempts the primary nodes concurrently, then iterates over the
@@ -418,7 +428,7 @@ def make_requests(self, req, ring, part, method, path, headers,
                 response.append((HTTP_SERVICE_UNAVAILABLE, '', ''))
         statuses, reasons, bodies = zip(*response)
         return self.best_response(req, statuses, reasons, bodies,
-            '%s %s' % (self.server_type, req.method))
+                                  '%s %s' % (self.server_type, req.method))
 
     def best_response(self, req, statuses, reasons, bodies, server_type,
                       etag=None):
@@ -495,7 +505,7 @@ def _make_app_iter_reader(self, node, source, queue, logger_thread_locals):
                 success = False
         except (Exception, Timeout):
             self.exception_occurred(node, _('Object'),
-                _('Trying to read during GET'))
+                                    _('Trying to read during GET'))
             success = False
         finally:
             # Ensure the queue getter gets a terminator.
