* s3cmd, S3/S3.py, S3/Progress.py: Display "[X of Y]"
  in --progress mode.

git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@295 830e0280-6d2a-0410-9c65-932aecc39d9d

ludvigm committed Dec 22, 2008
1 parent d61f14b commit 154795f
Showing 4 changed files with 19 additions and 14 deletions.
2 changes: 2 additions & 0 deletions ChangeLog
@@ -1,5 +1,7 @@
 2008-12-22  Michal Ludvig <michal@logix.cz>
 
+	* s3cmd, S3/S3.py, S3/Progress.py: Display "[X of Y]"
+	  in --progress mode.
 	* s3cmd, S3/Config.py: Implemented recursive [get].
 	  Added --skip-existing option for [get] and [sync].
 
2 changes: 1 addition & 1 deletion S3/Progress.py
@@ -44,7 +44,7 @@ def done(self, message):
 		self.display(done_message = message)
 
 	def output_labels(self):
-		self._stdout.write("%s -> %s\n" % (self.labels['source'], self.labels['destination']))
+		self._stdout.write("%(source)s -> %(destination)s %(extra)s\n" % self.labels)
 		self._stdout.flush()
 
 	def display(self, new_file = False, done_message = None):
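The output_labels() change above swaps positional %s formatting for dict-based %(name)s formatting, so the progress header can carry an optional counter without changing the method's call signature. A minimal sketch of the idea, with made-up label values (the real dicts are built in S3/S3.py below):

	import sys

	# 'extra' must always be present in the dict, even if empty,
	# or the %(extra)s placeholder raises a KeyError.
	labels = { 'source' : 'file.txt', 'destination' : 's3://bucket/file.txt', 'extra' : '[3 of 10]' }
	sys.stdout.write("%(source)s -> %(destination)s %(extra)s\n" % labels)
	# prints: file.txt -> s3://bucket/file.txt [3 of 10]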
8 changes: 4 additions & 4 deletions S3/S3.py
@@ -167,7 +167,7 @@ def bucket_info(self, uri):
 		response['bucket-location'] = getTextFromXml(response['data'], "LocationConstraint") or "any"
 		return response
 
-	def object_put(self, filename, uri, extra_headers = None):
+	def object_put(self, filename, uri, extra_headers = None, extra_label = ""):
 		# TODO TODO
 		# Make it consistent with stream-oriented object_get()
 		if uri.type != "s3":
@@ -194,15 +194,15 @@ def object_put(self, filename, uri, extra_headers = None):
 		if self.config.acl_public:
 			headers["x-amz-acl"] = "public-read"
 		request = self.create_request("OBJECT_PUT", uri = uri, headers = headers)
-		labels = { 'source' : file.name, 'destination' : uri }
+		labels = { 'source' : file.name, 'destination' : uri, 'extra' : extra_label }
 		response = self.send_file(request, file, labels)
 		return response
 
-	def object_get(self, uri, stream, start_position = 0):
+	def object_get(self, uri, stream, start_position = 0, extra_label = ""):
 		if uri.type != "s3":
 			raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
 		request = self.create_request("OBJECT_GET", uri = uri)
-		labels = { 'source' : uri, 'destination' : stream.name }
+		labels = { 'source' : uri, 'destination' : stream.name, 'extra' : extra_label }
 		response = self.recv_file(request, stream, labels, start_position)
 		return response
 
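Because extra_label defaults to an empty string, existing callers of object_put() and object_get() keep working unchanged; only the three call sites in s3cmd below opt in. A toy sketch of this backward-compatible pattern, with hypothetical names not taken from the commit:

	def transfer(source, destination, extra_label = ""):
		# The 'extra' key is always set, so Progress.output_labels()
		# can rely on it; an empty label just leaves a trailing space.
		return { 'source' : source, 'destination' : destination, 'extra' : extra_label }

	transfer("a.txt", "s3://bucket/a.txt")                           # old-style call: extra is ""
	transfer("a.txt", "s3://bucket/a.txt", extra_label = "[1 of 5]") # new-style call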
21 changes: 12 additions & 9 deletions s3cmd
@@ -208,10 +208,11 @@ def cmd_object_put(args):
 		uri_final = S3Uri(uri_arg_final)
 		extra_headers = {}
 		real_filename = file
+		seq_label = "[%d of %d]" % (seq, total)
 		if Config().encrypt:
 			exitcode, real_filename, extra_headers["x-amz-meta-s3tools-gpgenc"] = gpg_encrypt(file)
 		try:
-			response = s3.object_put(real_filename, uri_final, extra_headers)
+			response = s3.object_put(real_filename, uri_final, extra_headers, extra_label = seq_label)
 		except S3UploadError, e:
 			error("Upload of '%s' failed too many times. Skipping that file." % real_filename)
 			continue
@@ -220,9 +221,9 @@ def cmd_object_put(args):
 			continue
 		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
 		if not Config().progress_meter:
-			output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
+			output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
 				(file, uri_final, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
-				seq, total))
+				seq_label))
 		if Config().acl_public:
 			output("Public URL of the object is: %s" %
 				(uri_final.public_url()))
@@ -639,6 +640,7 @@ def cmd_sync_remote2local(src, dst):
 		seq += 1
 		uri = S3Uri(src_base + file)
 		dst_file = dst_base + file
+		seq_label = "[%d of %d]" % (seq, total_count)
 		try:
 			dst_dir = os.path.dirname(dst_file)
 			if not dir_cache.has_key(dst_dir):
@@ -658,7 +660,7 @@
 			os.close(os.open(dst_file, open_flags))
 			# Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
 			dst_stream = open(dst_file, "wb")
-			response = s3.object_get(uri, dst_stream)
+			response = s3.object_get(uri, dst_stream, extra_label = seq_label)
 			dst_stream.close()
 			if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
 				attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
@@ -699,9 +701,9 @@ def cmd_sync_remote2local(src, dst):
 			continue
 		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
 		if not Config().progress_meter:
-			output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
+			output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
 				(uri, dst_file, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
-				seq, total_count))
+				seq_label))
 		total_size += response["size"]
 
 	total_elapsed = time.time() - timestamp_start
@@ -786,11 +788,12 @@ def cmd_sync_local2remote(src, dst):
 		seq += 1
 		src = loc_list[file]['full_name']
 		uri = S3Uri(dst_base + file)
+		seq_label = "[%d of %d]" % (seq, total_count)
 		if cfg.preserve_attrs:
 			attr_header = _build_attr_header(src)
 			debug(attr_header)
 		try:
-			response = s3.object_put(src, uri, attr_header)
+			response = s3.object_put(src, uri, attr_header, extra_label = seq_label)
 		except S3UploadError, e:
 			error("%s: upload failed too many times. Skipping that file." % src)
 			continue
@@ -799,9 +802,9 @@ def cmd_sync_local2remote(src, dst):
 			continue
 		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
 		if not cfg.progress_meter:
-			output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
+			output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
 				(src, uri, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
-				seq, total_count))
+				seq_label))
 		total_size += response["size"]
 
 	total_elapsed = time.time() - timestamp_start
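All three call sites follow the same pattern: build seq_label once per file, pass it through extra_label, and interpolate it into either the progress header or the plain output() line. A rough reconstruction of the non-progress line with invented values (formatSize() is stubbed out here; it returns a (value, suffix) pair in the real code):

	seq, total_count = 1, 3
	seq_label = "[%d of %d]" % (seq, total_count)
	speed_fmt = (120.56, "k")   # pretend formatSize(..., human_readable = True, ...) returned this
	print("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
		("file1.txt", "s3://bucket/file1.txt", 123456, 1.0, speed_fmt[0], speed_fmt[1], seq_label))
	# prints: File 'file1.txt' stored as s3://bucket/file1.txt (123456 bytes in 1.0 seconds, 120.56 kB/s) [1 of 3]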
