Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

* s3cmd: Support for recursive [cp] and [mv], including

  multiple-source arguments, --include/--exclude,
  --dry-run, etc.
* run-tests.py: Tests for the above.
* S3/S3.py: Preserve metadata (e.g. ACL or MIME type)
  during [cp] and [mv].
* NEWS, TODO: Updated.



git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@389 830e0280-6d2a-0410-9c65-932aecc39d9d
  • Loading branch information...
commit e0b946c028d790956a27813a7adc59863bd72719 1 parent 3677a3b
@mludvig mludvig authored
Showing with 100 additions and 26 deletions.
  1. +10 −0 ChangeLog
  2. +4 −2 NEWS
  3. +4 −2 S3/S3.py
  4. +0 −1  TODO
  5. +34 −5 run-tests.py
  6. +48 −16 s3cmd
View
10 ChangeLog
@@ -1,5 +1,15 @@
2009-05-28 Michal Ludvig <michal@logix.cz>
+ * s3cmd: Support for recursive [cp] and [mv], including
+ multiple-source arguments, --include/--exclude,
+ --dry-run, etc.
+ * run-tests.py: Tests for the above.
+ * S3/S3.py: Preserve metadata (eg ACL or MIME type)
+ during [cp] and [mv].
+ * NEWS, TODO: Updated.
+
+2009-05-28 Michal Ludvig <michal@logix.cz>
+
* run-tests.py: Added --verbose mode.
2009-05-27 Michal Ludvig <michal@logix.cz>
View
6 NEWS
@@ -1,11 +1,13 @@
s3cmd 1.0.0
===========
-* New command 'sign' for signing for instance
- the POST upload policies.
+* New command 'sign' for signing e.g. POST upload policies.
* Fixed handling of filenames that differ only in
capitalisation (eg blah.txt vs Blah.TXT).
* Added --verbatim mode, preventing most filenames
pre-processing. Good for fixing unreadable buckets.
+* Added --recursive support for [cp] and [mv], including
+ multiple-source arguments, --include/--exclude, --dry-run, etc.
+
s3cmd 0.9.9 - 2009-02-17
===========
View
6 S3/S3.py
@@ -275,10 +275,12 @@ def object_copy(self, src_uri, dst_uri, extra_headers = None):
raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
headers = SortedDict(ignore_case = True)
headers['x-amz-copy-source'] = "/%s/%s" % (src_uri.bucket(), self.urlencode_string(src_uri.object()))
+ ## TODO: For now COPY, later maybe add a switch?
+ headers['x-amz-metadata-directive'] = "COPY"
if self.config.acl_public:
headers["x-amz-acl"] = "public-read"
- if extra_headers:
- headers.update(extra_headers)
+ # if extra_headers:
+ # headers.update(extra_headers)
request = self.create_request("OBJECT_PUT", uri = dst_uri, headers = headers)
response = self.send_request(request)
return response
View
1  TODO
@@ -6,7 +6,6 @@ TODO list for s3cmd project
(at the moment it'll always download).
- Enable --exclude for [del], [setacl], [ls].
- Enable --dry-run for [del], [setacl], reject for all others.
- - Recursive cp/mv on remote "folders".
- Allow change /tmp to somewhere else
- With --guess-mime use 'magic' module if available.
- Support --preserve for [put] and [get]. Update manpage.
View
39 run-tests.py
@@ -344,15 +344,44 @@ def test_flushdir(label, dir_name):
test_s3cmd("Get multiple files", ['get', 's3://s3cmd-autotest-1/xyz/etc2/Logo.PNG', 's3://s3cmd-autotest-1/xyz/etc/AtomicClockRadio.ttf', 'testsuite-out'],
must_find = [ u"saved as 'testsuite-out/Logo.PNG'", u"saved as 'testsuite-out/AtomicClockRadio.ttf'" ])
-
-## ====== Copy between buckets
-test_s3cmd("Copy between buckets", ['cp', 's3://s3cmd-autotest-1/xyz/etc2/Logo.PNG', 's3://s3cmd-Autotest-3'],
- must_find = [ "File s3://s3cmd-autotest-1/xyz/etc2/Logo.PNG copied to s3://s3cmd-Autotest-3/xyz/etc2/Logo.PNG" ])
-
## ====== Upload files differing in capitalisation
test_s3cmd("blah.txt / Blah.txt", ['put', '-r', 'testsuite/blahBlah', 's3://s3cmd-autotest-1/'],
must_find = [ 's3://s3cmd-autotest-1/blahBlah/Blah.txt', 's3://s3cmd-autotest-1/blahBlah/blah.txt' ])
+## ====== Copy between buckets
+test_s3cmd("Copy between buckets", ['cp', 's3://s3cmd-autotest-1/xyz/etc2/Logo.PNG', 's3://s3cmd-Autotest-3/xyz/etc2/logo.png'],
+ must_find = [ "File s3://s3cmd-autotest-1/xyz/etc2/Logo.PNG copied to s3://s3cmd-Autotest-3/xyz/etc2/logo.png" ])
+
+## ====== Recursive copy
+test_s3cmd("Recursive copy, set ACL", ['cp', '-r', '--acl-public', 's3://s3cmd-autotest-1/xyz/', 's3://s3cmd-autotest-2/copy', '--exclude', '.svn/*'],
+ must_find = [ "File s3://s3cmd-autotest-1/xyz/etc2/Logo.PNG copied to s3://s3cmd-autotest-2/copy/etc2/Logo.PNG",
+ "File s3://s3cmd-autotest-1/xyz/blahBlah/Blah.txt copied to s3://s3cmd-autotest-2/copy/blahBlah/Blah.txt",
+ "File s3://s3cmd-autotest-1/xyz/blahBlah/blah.txt copied to s3://s3cmd-autotest-2/copy/blahBlah/blah.txt" ],
+ must_not_find = [ ".svn" ])
+
+## ====== Verify ACL and MIME type
+test_s3cmd("Verify ACL and MIME type", ['info', 's3://s3cmd-autotest-2/copy/etc2/Logo.PNG' ],
+ must_find_re = [ "MIME type:.*image/png",
+ "ACL:.*\*anon\*: READ",
+ "URL:.*http://s3cmd-autotest-2.s3.amazonaws.com/copy/etc2/Logo.PNG" ])
+
+## ====== Multi source move
+test_s3cmd("Multi-source move", ['mv', '-r', 's3://s3cmd-autotest-2/copy/blahBlah/Blah.txt', 's3://s3cmd-autotest-2/copy/etc/', 's3://s3cmd-autotest-2/moved/'],
+ must_find = [ "File s3://s3cmd-autotest-2/copy/blahBlah/Blah.txt moved to s3://s3cmd-autotest-2/moved/Blah.txt",
+ "File s3://s3cmd-autotest-2/copy/etc/AtomicClockRadio.ttf moved to s3://s3cmd-autotest-2/moved/AtomicClockRadio.ttf",
+ "File s3://s3cmd-autotest-2/copy/etc/TypeRa.ttf moved to s3://s3cmd-autotest-2/moved/TypeRa.ttf" ],
+ must_not_find = [ "blah.txt" ])
+
+## ====== Verify move
+test_s3cmd("Verify move", ['ls', '-r', 's3://s3cmd-autotest-2'],
+ must_find = [ "s3://s3cmd-autotest-2/moved/Blah.txt",
+ "s3://s3cmd-autotest-2/moved/AtomicClockRadio.ttf",
+ "s3://s3cmd-autotest-2/moved/TypeRa.ttf",
+ "s3://s3cmd-autotest-2/copy/blahBlah/blah.txt" ],
+ must_not_find = [ "s3://s3cmd-autotest-2/copy/blahBlah/Blah.txt",
+ "s3://s3cmd-autotest-2/copy/etc/AtomicClockRadio.ttf",
+ "s3://s3cmd-autotest-2/copy/etc/TypeRa.ttf" ])
+
## ====== Simple delete
test_s3cmd("Simple delete", ['del', 's3://s3cmd-autotest-1/xyz/etc2/Logo.PNG'],
must_find = [ "File s3://s3cmd-autotest-1/xyz/etc2/Logo.PNG deleted" ])
View
64 s3cmd
@@ -513,32 +513,64 @@ def subcmd_object_del_uri(uri, recursive = None):
response = s3.object_delete(_uri)
output(u"File %s deleted" % _uri)
-def subcmd_cp_mv(args, process_fce, message):
- src_uri = S3Uri(args.pop(0))
- dst_uri = S3Uri(args.pop(0))
+def subcmd_cp_mv(args, process_fce, action_str, message):
+ if len(args) < 2:
+ raise ParameterError("Expecting two or more S3 URIs for " + action_str)
+ dst_base_uri = S3Uri(args.pop())
+ if dst_base_uri.type != "s3":
+ raise ParameterError("Destination must be S3 URI. To download a file use 'get' or 'sync'.")
+ destination_base = dst_base_uri.uri()
- if len(args):
- raise ParameterError("Too many parameters! Expected: %s" % commands['cp']['param'])
+ remote_list = fetch_remote_list(args, require_attribs = False)
+ remote_list, exclude_list = _filelist_filter_exclude_include(remote_list)
+
+ remote_count = len(remote_list)
- if src_uri.type != "s3" or dst_uri.type != "s3":
- raise ParameterError("Parameters are not URIs! Expected: %s" % commands['cp']['param'])
+ info(u"Summary: %d remote files to %s" % (remote_count, action_str))
- if dst_uri.object() == "":
- dst_uri = S3Uri(dst_uri.uri() + src_uri.object())
+ if cfg.recursive:
+ if not destination_base.endswith("/"):
+ destination_base += "/"
+ for key in remote_list:
+ remote_list[key]['dest_name'] = destination_base + key
+ else:
+ key = remote_list.keys()[0]
+ if destination_base.endswith("/"):
+ remote_list[key]['dest_name'] = destination_base + key
+ else:
+ remote_list[key]['dest_name'] = destination_base
+
+ if cfg.dry_run:
+ for key in exclude_list:
+ output(u"exclude: %s" % unicodise(key))
+ for key in remote_list:
+ output(u"%s: %s -> %s" % (action_str, remote_list[key]['object_uri_str'], remote_list[key]['dest_name']))
- extra_headers = copy(cfg.extra_headers)
- response = process_fce(src_uri, dst_uri, extra_headers)
- output(message % { "src" : src_uri, "dst" : dst_uri})
- if Config().acl_public:
- output(u"Public URL is: %s" % dst_uri.public_url())
+ warning(u"Exitting now because of --dry-run")
+ return
+
+ seq = 0
+ for key in remote_list:
+ seq += 1
+ seq_label = "[%d of %d]" % (seq, remote_count)
+
+ item = remote_list[key]
+ src_uri = S3Uri(item['object_uri_str'])
+ dst_uri = S3Uri(item['dest_name'])
+
+ extra_headers = copy(cfg.extra_headers)
+ response = process_fce(src_uri, dst_uri, extra_headers)
+ output(message % { "src" : src_uri, "dst" : dst_uri })
+ if Config().acl_public:
+ info(u"Public URL is: %s" % dst_uri.public_url())
def cmd_cp(args):
s3 = S3(Config())
- subcmd_cp_mv(args, s3.object_copy, "File %(src)s copied to %(dst)s")
+ subcmd_cp_mv(args, s3.object_copy, "copy", "File %(src)s copied to %(dst)s")
def cmd_mv(args):
s3 = S3(Config())
- subcmd_cp_mv(args, s3.object_move, "File %(src)s moved to %(dst)s")
+ subcmd_cp_mv(args, s3.object_move, "move", "File %(src)s moved to %(dst)s")
def cmd_info(args):
s3 = S3(Config())
Please sign in to comment.
Something went wrong with that request. Please try again.