Add --max-retries and --retry-delay
cyno committed Oct 31, 2011
1 parent b3debba commit 88869ab
Showing 7 changed files with 17 additions and 14 deletions.
2 changes: 2 additions & 0 deletions S3/Config.py
@@ -78,6 +78,8 @@ class Config(object):
     workers = 10
     follow_symlinks=False
     select_dir = False
+    max_retries = 5
+    retry_delay = 3
 
     ## Creating a singleton
     def __new__(self, configfile = None):
Expand Down
21 changes: 12 additions & 9 deletions S3/S3.py
@@ -47,7 +47,7 @@ def update_timestamp(self):
     def format_param_str(self):
         """
         Format URL parameters from self.params and returns
-        ?parm1=val1&parm2=val2 or an empty string if there
+        ?parm1=val1&parm2=val2 or an empty string if there
         are no parameters. Output of this function should
         be appended directly to self.resource['uri']
         """
@@ -119,9 +119,6 @@ class S3(object):
     ## S3 sometimes sends HTTP-307 response
     redir_map = {}
 
-    ## Maximum attempts of re-issuing failed requests
-    _max_retries = 5
-
     def __init__(self, config):
         self.config = config
 
@@ -458,9 +455,11 @@ def create_request(self, operation, uri = None, bucket = None, object = None, he
 
     def _fail_wait(self, retries):
         # Wait a few seconds. The more it fails the more we wait.
-        return (self._max_retries - retries + 1) * 3
+        return (self.config.max_retries - retries + 1) * self.config.retry_delay
 
-    def send_request(self, request, body = None, retries = _max_retries):
+    def send_request(self, request, body = None, retries = -1):
+        if retries == -1:
+            retries = self.config.max_retries
         method_string, resource, headers = request.get_triplet()
         debug("Processing request, please wait...")
         if not headers.has_key('content-length'):
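With the defaults added to S3/Config.py above (max_retries = 5, retry_delay = 3), the reworked _fail_wait() yields a linearly growing pause: 3 seconds before the first retry, 15 before the last. A minimal standalone sketch of that schedule (the config class below is a stand-in for illustration, not s3cmd's real Config singleton):

    # Stand-in for the new S3/Config.py defaults; not the real singleton.
    class FakeConfig(object):
        max_retries = 5
        retry_delay = 3

    def fail_wait(config, retries):
        # Same formula as the new S3._fail_wait(): the fewer retries left,
        # the longer the wait (linear back-off scaled by retry_delay).
        return (config.max_retries - retries + 1) * config.retry_delay

    config = FakeConfig()
    for retries in range(config.max_retries, 0, -1):
        print("retries left: %d -> wait %d s" % (retries, fail_wait(config, retries)))
    # retries left: 5 -> wait 3 s
    # ...
    # retries left: 1 -> wait 15 s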
@@ -509,7 +508,9 @@ def send_request(self, request, body = None, retries = _max_retries):
 
         return response
 
-    def send_file(self, request, file, labels, throttle = 0, retries = _max_retries):
+    def send_file(self, request, file, labels, throttle = 0, retries = -1):
+        if retries == -1:
+            retries = self.config.max_retries
         method_string, resource, headers = request.get_triplet()
         size_left = size_total = headers.get("content-length")
         if self.config.progress_meter:
@@ -562,7 +563,7 @@ def send_file(self, request, file, labels, throttle = 0, retries = _max_retries)
             if self.config.progress_meter:
                 progress.done("failed")
             if retries:
-                if retries < self._max_retries:
+                if retries < self.config.max_retries:
                     throttle = throttle and throttle * 5 or 0.01
                 warning("Upload failed: %s (%s)" % (resource['uri'], e))
                 warning("Retrying on lower speed (throttle=%0.2f)" % throttle)
@@ -634,7 +635,9 @@ def send_file(self, request, file, labels, throttle = 0, retries = _max_retries)
 
         return response
 
-    def recv_file(self, request, stream, labels, start_position = 0, retries = _max_retries):
+    def recv_file(self, request, stream, labels, start_position = 0, retries = -1):
+        if retries == -1:
+            retries = self.config.max_retries
         method_string, resource, headers = request.get_triplet()
         if self.config.progress_meter:
             progress = self.config.progress_class(labels, 0)
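A note on the recurring retries = -1 change in send_request(), send_file() and recv_file(): the old retries = _max_retries default was evaluated once, when the def statement ran, against the class attribute this commit removes. Using -1 as a sentinel defers the lookup to call time, so a value set through --max-retries is honoured. A minimal sketch of the same pattern outside s3cmd (names here are made up):

    class RetryConfig(object):
        max_retries = 5                    # may be changed later, e.g. from the CLI

    class DemoClient(object):
        def __init__(self, config):
            self.config = config

        def send_request(self, retries=-1):
            # -1 is a sentinel meaning "use whatever the config says right now".
            if retries == -1:
                retries = self.config.max_retries
            return retries

    cfg = RetryConfig()
    client = DemoClient(cfg)
    print(client.send_request())           # 5
    cfg.max_retries = 8                    # simulate --max-retries=8
    print(client.send_request())           # 8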
4 changes: 3 additions & 1 deletion s3cmd
@@ -1970,7 +1970,9 @@ def main():
     optparser.add_option( "--workers", dest="workers", default=10, help="Sets the number of workers to run for uploading and downloading files (can only be used in conjunction with the --parallel argument)")
 
     optparser.add_option( "--directory", dest="select_dir", action="store_true", default=False, help="Select directories (only for [ls]).")
-
+    optparser.add_option( "--max-retries", dest="max_retries", type="int", action="store", default=5, help="Number of retries before failing a GET or PUT.")
+    optparser.add_option( "--retry-delay", dest="retry_delay", type="int", action="store", default=3, help="Time delay, in seconds, to wait after a failed GET or PUT.")
+
     optparser.set_usage(optparser.usage + " COMMAND [parameters]")
     optparser.set_description('S3cmd is a tool for managing objects in '+
         'Amazon S3 storage. It allows for making and removing '+
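With the options registered in optparse, retry behaviour can now be tuned per invocation. A couple of hypothetical command lines (bucket and file names made up):

    # Retry a flaky upload up to 10 times, with a 5-second back-off step:
    s3cmd --max-retries=10 --retry-delay=5 put backup.tar.gz s3://my-bucket/backups/

    # Give up quickly instead: allow a single retry with the default delay.
    s3cmd --max-retries=1 get s3://my-bucket/backups/backup.tar.gz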
1 change: 0 additions & 1 deletion testsuite/etc/brokenlink.png
1 change: 0 additions & 1 deletion testsuite/etc/linked.png
1 change: 0 additions & 1 deletion testsuite/etc/linked1.png
1 change: 0 additions & 1 deletion testsuite/etc/more/linked-dir
