Skip to content
Browse files

* s3cmd.1: Document all the new options and commands.

* s3cmd, S3/Config.py: Updated some help texts. Removed
  option --debug-syncmatch along the way (because --dry-run
  with --debug is good enough).
* TODO: Updated.



git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@376 830e0280-6d2a-0410-9c65-932aecc39d9d
  • Loading branch information...
1 parent 5c805fd commit 01a805248c1b925f44984c545355ca1cb51c8fb6 @mludvig mludvig committed
Showing with 173 additions and 93 deletions.
  1. +8 −0 ChangeLog
  2. +0 −1 S3/Config.py
  3. +4 −3 TODO
  4. +4 −12 s3cmd
  5. +157 −77 s3cmd.1
View
8 ChangeLog
@@ -1,3 +1,11 @@
+2009-02-17 Michal Ludvig <michal@logix.cz>
+
+ * s3cmd.1: Document all the new options and commands.
+ * s3cmd, S3/Config.py: Updated some help texts. Removed
+ option --debug-syncmatch along the way (because --dry-run
+ with --debug is good enough).
+ * TODO: Updated.
+
2009-02-16 Michal Ludvig <michal@logix.cz>
* s3cmd: Check Python version >= 2.4 as soon as possible.
View
1 S3/Config.py
@@ -59,7 +59,6 @@ class Config(object):
bucket_location = "US"
default_mime_type = "binary/octet-stream"
guess_mime_type = True
- debug_syncmatch = False
# List of checks to be performed for 'sync'
sync_checks = ['size', 'md5'] # 'weak-timestamp'
# List of compiled REGEXPs
View
7 TODO
@@ -6,12 +6,13 @@ TODO list for s3cmd project
(at the moment it'll always download).
- Enable --exclude for [del], [setacl], [ls].
- Enable --dry-run for [del], [setacl], reject for all others.
- - Add testsuite for new put, get and sync semantic.
- Recursive cp/mv on remote "folders".
- - Document --recursive and --force for buckets, CloudFront,
- new options --dry-run, --include, etc.
- Allow change /tmp to somewhere else
- With --guess-mime use 'magic' module if available.
+ - Support --preserve for [put] and [get]. Update manpage.
+ - Don't let --continue fail if the file is already fully downloaded.
+ - Option --mime-type should set mime type with 'cp' and 'mv'.
+ If possible --guess-mime-type should do as well.
- For 1.0.0
- Add 'geturl' command, both Unicode and urlencoded output.
View
16 s3cmd
@@ -692,12 +692,9 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote):
info(u"Verifying attributes...")
cfg = Config()
exists_list = SortedDict()
- if cfg.debug_syncmatch:
- logging.root.setLevel(logging.DEBUG)
for file in src_list.keys():
- if not cfg.debug_syncmatch:
- debug(u"CHECK: %s" % file)
+ debug(u"CHECK: %s" % file)
if dst_list.has_key(file):
## Was --skip-existing requested?
if cfg.skip_existing:
@@ -736,10 +733,6 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote):
## Remove from destination-list, all that is left there will be deleted
del(dst_list[file])
- if cfg.debug_syncmatch:
- warning(u"Exiting because of --debug-syncmatch")
- sys.exit(1)
-
return src_list, dst_list, exists_list
def cmd_sync_remote2local(args):
@@ -1350,7 +1343,7 @@ def main():
optparser.add_option("-c", "--config", dest="config", metavar="FILE", help="Config file name. Defaults to %default")
optparser.add_option( "--dump-config", dest="dump_config", action="store_true", help="Dump current configuration after parsing config files and command line options and exit.")
- optparser.add_option("-n", "--dry-run", dest="dry_run", action="store_true", help="Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though (only for [sync] command)")
+ optparser.add_option("-n", "--dry-run", dest="dry_run", action="store_true", help="Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though (only for file transfer commands)")
optparser.add_option("-e", "--encrypt", dest="encrypt", action="store_true", help="Encrypt files before uploading to S3.")
optparser.add_option( "--no-encrypt", dest="encrypt", action="store_false", help="Don't encrypt files.")
@@ -1372,19 +1365,18 @@ def main():
optparser.add_option( "--include-from", dest="include_from", action="append", metavar="FILE", help="Read --include GLOBs from FILE")
optparser.add_option( "--rinclude", dest="rinclude", action="append", metavar="REGEXP", help="Same as --include but uses REGEXP (regular expression) instead of GLOB")
optparser.add_option( "--rinclude-from", dest="rinclude_from", action="append", metavar="FILE", help="Read --rinclude REGEXPs from FILE")
- optparser.add_option( "--debug-syncmatch", "--debug-exclude", dest="debug_syncmatch", action="store_true", help="Output detailed information about remote vs. local filelist matching and --exclude processing and then exit")
optparser.add_option( "--bucket-location", dest="bucket_location", help="Datacentre to create bucket in. Either EU or US (default)")
optparser.add_option("-m", "--mime-type", dest="default_mime_type", type="mimetype", metavar="MIME/TYPE", help="Default MIME-type to be set for objects stored.")
optparser.add_option("-M", "--guess-mime-type", dest="guess_mime_type", action="store_true", help="Guess MIME-type of files by their extension. Falls back to default MIME-Type as specified by --mime-type option")
- optparser.add_option( "--add-header", dest="add_header", action="append", metavar="NAME:VALUE", help="Add a given HTTP header to the upload request. Can be used multiple times. (only for [put] and [sync] commands).")
+ optparser.add_option( "--add-header", dest="add_header", action="append", metavar="NAME:VALUE", help="Add a given HTTP header to the upload request. Can be used multiple times. For instance set 'Expires' or 'Cache-Control' headers (or both) using this option if you like.")
optparser.add_option( "--encoding", dest="encoding", metavar="ENCODING", help="Override autodetected terminal and filesystem encoding (character set). Autodetected: %s" % preferred_encoding)
optparser.add_option( "--list-md5", dest="list_md5", action="store_true", help="Include MD5 sums in bucket listings (only for 'ls' command).")
- optparser.add_option("-H", "--human-readable-sizes", dest="human_readable_sizes", action="store_true", help="Print sizes in human readable form.")
+ optparser.add_option("-H", "--human-readable-sizes", dest="human_readable_sizes", action="store_true", help="Print sizes in human readable form (eg 1kB instead of 1234).")
optparser.add_option( "--progress", dest="progress_meter", action="store_true", help="Display progress meter (default on TTY).")
optparser.add_option( "--no-progress", dest="progress_meter", action="store_false", help="Don't display progress meter (default on non-TTY).")
View
234 s3cmd.1
@@ -1,6 +1,6 @@
.TH s3cmd 1
.SH NAME
-s3cmd \- tool for managing Amazon S3 storage space
+s3cmd \- tool for managing Amazon S3 storage space and Amazon CloudFront content delivery network
.SH SYNOPSIS
.B s3cmd
[\fIOPTIONS\fR] \fICOMMAND\fR [\fIPARAMETERS\fR]
@@ -42,13 +42,16 @@ Backup a directory tree to S3
\fBsync\fR \fIs3://BUCKET[/PREFIX] LOCAL_DIR\fR
Restore a tree from S3 to local directory
.TP
-\fBcp\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR
-\fBmv\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR
+\fBcp\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR, \fBmv\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR
Make a copy of a file (\fIcp\fR) or move a file (\fImv\fR).
Destination can be in the same bucket with a different name
or in another bucket with the same or different name.
Adding \fI\-\-acl\-public\fR will make the destination object
publicly accessible (see below).
+.TP
+\fBsetacl\fR \fIs3://BUCKET[/OBJECT]\fR
+Modify \fIAccess control list\fR for Bucket or Files. Use with
+\fI\-\-acl\-public\fR or \fI\-\-acl\-private\fR
.TP
\fBinfo\fR \fIs3://BUCKET[/OBJECT]\fR
Get various information about a Bucket or Object
@@ -56,6 +59,24 @@ Get various information about a Bucket or Object
\fBdu\fR \fI[s3://BUCKET[/PREFIX]]\fR
Disk usage \- amount of data stored in S3
+.PP
+Commands for CloudFront management
+.TP
+\fBcflist\fR
+List CloudFront distribution points
+.TP
+\fBcfinfo\fR [\fIcf://DIST_ID\fR]
+Display CloudFront distribution point parameters
+.TP
+\fBcfcreate\fR \fIs3://BUCKET\fR
+Create CloudFront distribution point
+.TP
+\fBcfdelete\fR \fIcf://DIST_ID\fR
+Delete CloudFront distribution point
+.TP
+\fBcfmodify\fR \fIcf://DIST_ID\fR
+Change CloudFront distribution point parameters
+
.SH OPTIONS
.PP
Some of the below specified options can have their default
@@ -63,9 +84,9 @@ values set in
.B s3cmd
config file (by default $HOME/.s3cmd). As it's a simple text file
feel free to open it with your favorite text editor and do any
-changes you like.
+changes you like.
.PP
-Config file related options.
+\fIConfig file related options.\fR
.TP
\fB\-\-configure\fR
Invoke interactive (re)configuration tool. Don't worry, you won't
@@ -78,24 +99,26 @@ Config file name. Defaults to $HOME/.s3cfg
Dump current configuration after parsing config files
and command line options and exit.
.PP
-Most options can have a default value set in the above specified config file.
-.PP
-Options specific to \fBsync\fR command:
+\fIOptions specific for \fIfile transfer commands\fR (\fBsync\fR, \fBput\fR and \fBget\fR):
+.TP
+\fB\-n\fR, \fB\-\-dry\-run\fR
+Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though.
.TP
\fB\-\-delete\-removed\fR
Delete remote objects with no corresponding local file when \fIsync\fRing \fBto\fR S3 or delete local files with no corresponding object in S3 when \fIsync\fRing \fBfrom\fR S3.
.TP
\fB\-\-no\-delete\-removed\fR
-Don't delete remote objects. Default for 'sync' command.
+Don't delete remote objects. Default for \fIsync\fR command.
.TP
\fB\-p\fR, \fB\-\-preserve\fR
-Preserve filesystem attributes (mode, ownership, timestamps). Default for 'sync' command.
+Preserve filesystem attributes (mode, ownership, timestamps). Default for \fIsync\fR command.
.TP
\fB\-\-no\-preserve\fR
Don't store filesystem attributes with uploaded files.
.TP
\fB\-\-exclude GLOB\fR
-Exclude files matching GLOB (a.k.a. shell-style wildcard) from \fIsync\fI. See SYNC COMMAND section for more information.
+Exclude files matching GLOB (a.k.a. shell-style wildcard) from \fIsync\fR. See FILE TRANSFERS section and \fIhttp://s3tools.org/s3cmd-sync\fR for more information.
.TP
\fB\-\-exclude\-from FILE\fR
Same as \-\-exclude but reads GLOBs from the given FILE instead of expecting them on the command line.
@@ -106,31 +129,14 @@ Same as \-\-exclude but works with REGEXPs (Regular expressions).
\fB\-\-rexclude\-from FILE\fR
Same as \-\-exclude\-from but works with REGEXPs.
.TP
-\fB\-\-debug\-syncmatch\fR or \fB\-\-debug\-exclude\fR (alias)
-Display detailed information about matching file names against exclude\-rules as well as information about remote vs local filelists matching. S3cmd exits after performing the match and no actual transfer takes place.
-.\".TP
-.\"\fB\-n\fR, \fB\-\-dry\-run\fR
-.\"Only show what would be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though.
-.PP
-Options common for all commands (where it makes sense indeed):
-.TP
-\fB\-f\fR, \fB\-\-force\fR
-Force overwrite and other dangerous operations.
+\fB\-\-include=GLOB\fR, \fB\-\-include\-from=FILE\fR, \fB\-\-rinclude=REGEXP\fR, \fB\-\-rinclude\-from=FILE\fR
+Filenames and paths matching GLOB or REGEXP will be included even if previously excluded by one of \-\-(r)exclude(\-from) patterns
.TP
\fB\-\-continue\fR
-Continue getting a partially downloaded file (only for \fIget\fR command). This comes handy once download of a large file, say an ISO image, from a S3 bucket fails and a partially downloaded file is left on the disk. Unfortunately \fIput\fR command doesn't support restarting of failed upload due to Amazon S3 limitation.
-.TP
-\fB\-P\fR, \fB\-\-acl\-public\fR
-Store objects with permissions allowing read for anyone.
-.TP
-\fB\-\-acl\-private\fR
-Store objects with default ACL allowing access for you only.
-.TP
-\fB\-\-bucket\-location\fR=BUCKET_LOCATION
-Specify datacentre where to create the bucket. Possible values are \fIUS\fR (default) or \fIEU\fR.
+Continue getting a partially downloaded file (only for \fIget\fR command). This comes handy once download of a large file, say an ISO image, from a S3 bucket fails and a partially downloaded file is left on the disk. Unfortunately \fIput\fR command doesn't support restarting of failed upload due to Amazon S3 limitations.
.TP
-\fB\-e\fR, \fB\-\-encrypt\fR
-Use GPG encryption to protect stored objects from unauthorized access.
+\fB\-\-skip\-existing\fR
+Skip over files that exist at the destination (only for \fIget\fR and \fIsync\fR commands).
.TP
\fB\-m\fR MIME/TYPE, \fB\-\-mime\-type\fR=MIME/TYPE
Default MIME\-type to be set for objects stored.
@@ -140,15 +146,65 @@ Guess MIME\(hytype of files by their extension. Falls
back to default MIME\(hyType as specified by \fB\-\-mime\-type\fR
option
.TP
+\fB\-\-add\-header=NAME:VALUE\fR
+Add a given HTTP header to the upload request. Can be used multiple times with different header names. For instance set 'Expires' or 'Cache-Control' headers (or both) using this option if you like.
+.TP
+\fB\-P\fR, \fB\-\-acl\-public\fR
+Store objects with permissions allowing read for anyone. See \fIhttp://s3tools.org/s3cmd-public\fR for details and hints for storing publicly accessible files.
+.TP
+\fB\-\-acl\-private\fR
+Store objects with default ACL allowing access for you only.
+.TP
+\fB\-e\fR, \fB\-\-encrypt\fR
+Use GPG encryption to protect stored objects from unauthorized access. See \fIhttp://s3tools.org/s3cmd-public\fR for details about encryption.
+.TP
+\fB\-\-no\-encrypt\fR
+Don't encrypt files.
+.PP
+\fIOptions for CloudFront commands\fR:
+.PP
+See \fIhttp://s3tools.org/s3cmd-cloudfront\fR for more details.
+.TP
+\fB\-\-enable\fR
+Enable given CloudFront distribution (only for \fIcfmodify\fR command)
+.TP
+\fB\-\-disable\fR
+Disable given CloudFront distribution (only for \fIcfmodify\fR command)
+.TP
+\fB\-\-cf\-add\-cname=CNAME\fR
+Add given CNAME to a CloudFront distribution (only for \fIcfcreate\fR and \fIcfmodify\fR commands)
+.TP
+\fB\-\-cf\-remove\-cname=CNAME\fR
+Remove given CNAME from a CloudFront distribution (only for \fIcfmodify\fR command)
+.TP
+\fB\-\-cf\-comment=COMMENT\fR
+Set COMMENT for a given CloudFront distribution (only for \fIcfcreate\fR and \fIcfmodify\fR commands)
+.PP
+\fIOptions common for all commands\fR (where it makes sense indeed):
+.TP
+\fB\-r\fR, \fB\-\-recursive\fR
+Recursive upload, download or removal. When used with \fIdel\fR it can
+remove all the files in a bucket.
+.TP
+\fB\-f\fR, \fB\-\-force\fR
+Force overwrite and other dangerous operations. Can be used to remove
+a non\-empty bucket with \fIs3cmd rb \-\-force s3://bkt\fR
+.TP
+\fB\-\-bucket\-location\fR=BUCKET_LOCATION
+Specify datacentre where to create the bucket. Possible values are \fIUS\fR (default) or \fIEU\fR.
+.TP
\fB\-H\fR, \fB\-\-human\-readable\-sizes\fR
Print sizes in human readable form.
-.\".TP
-.\"\fB\-u\fR, \fB\-\-show\-uri\fR
-.\"Show complete S3 URI in listings.
+.TP
+\fB\-\-list\-md5\fR
+Include MD5 sums in bucket listings (only for \fIls\fR command).
.TP
\fB\-\-progress\fR, \fB\-\-no\-progress\fR
Display or don't display progress meter. When running on TTY (e.g. console or xterm) the default is to display progress meter. If not on TTY (e.g. output is redirected somewhere or running from cron) the default is to not display progress meter.
.TP
+\fB\-\-encoding=ENCODING\fR
+Override autodetected terminal and filesystem encoding (character set).
+.TP
\fB\-v\fR, \fB\-\-verbose\fR
Enable verbose output.
.TP
@@ -163,77 +219,101 @@ Show
.B s3cmd
version and exit.
-.SH SYNC COMMAND
+.SH FILE TRANSFERS
One of the most powerful commands of \fIs3cmd\fR is \fBs3cmd sync\fR used for
-synchronising complete directory trees to or from remote S3 storage.
+synchronising complete directory trees to or from remote S3 storage. To some extent
+\fBs3cmd put\fR and \fBs3cmd get\fR share a similar behaviour with \fBsync\fR.
.PP
Basic usage common in backup scenarios is as simple as:
.nf
- s3cmd sync /local/path s3://test-bucket/backup
+ s3cmd sync /local/path/ s3://test-bucket/backup/
.fi
.PP
This command will find all files under /local/path directory and copy them
to corresponding paths under s3://test-bucket/backup on the remote side.
For example:
.nf
-/local/path\fB/file1.ext\fR \-> s3://test-bucket/backup\fB/file1.ext\fR
-/local/path\fB/dir123/file2.bin\fR \-> s3://test-bucket/backup\fB/dir123/file2.bin\fR
+ /local/path/\fBfile1.ext\fR \-> s3://bucket/backup/\fBfile1.ext\fR
+ /local/path/\fBdir123/file2.bin\fR \-> s3://bucket/backup/\fBdir123/file2.bin\fR
.fi
-
+.PP
+However if the local path doesn't end with a slash the last directory's name
+is used on the remote side as well. Compare these with the previous example:
+.nf
+ s3cmd sync /local/path s3://test-bucket/backup/
+.fi
+will sync:
+.nf
+ /local/\fBpath/file1.ext\fR \-> s3://bucket/backup/\fBpath/file1.ext\fR
+ /local/\fBpath/dir123/file2.bin\fR \-> s3://bucket/backup/\fBpath/dir123/file2.bin\fR
+.fi
+.PP
To retrieve the files back from S3 use inverted syntax:
.nf
- s3cmd sync s3://test-bucket/backup/ /tmp/restore
+ s3cmd sync s3://test-bucket/backup/ /tmp/restore/
.fi
that will download files:
.nf
-s3://test-bucket/backup\fB/file1.ext\fR \-> /tmp/restore\fB/file1.ext\fR
-s3://test-bucket/backup\fB/dir123/file2.bin\fR \-> /tmp/restore\fB/dir123/file2.bin\fR
+ s3://bucket/backup/\fBfile1.ext\fR \-> /tmp/restore/\fBfile1.ext\fR
+ s3://bucket/backup/\fBdir123/file2.bin\fR \-> /tmp/restore/\fBdir123/file2.bin\fR
.fi
-
-For the purpose of \fB\-\-exclude\fR and \fB\-\-exclude\-from\fR matching the file name
-\fIalways\fR begins with \fB/\fR (slash) and has the local or remote common part removed.
-For instance in the previous example the file names tested against \-\-exclude list
-will be \fB/\fRfile1.ext and \fB/\fRdir123/file2.bin, that is both with the leading
-slash regardless whether you specified s3://test-bucket/backup or
-s3://test-bucket/backup/ (note the trailing slash) on the command line.
-
-Both \fB\-\-exclude\fR and \fB\-\-exclude\-from\fR work with shell-style wildcards (a.k.a. GLOB).
-For a greater flexibility s3cmd provides Regular-expression versions of the two exclude options
-named \fB\-\-rexclude\fR and \fB\-\-rexclude\-from\fR.
-
-Run s3cmd with \fB\-\-debug\-syncmatch\fR to get detailed information
-about matching file names against exclude rules.
-
-For example to exclude all files with ".bin" extension with a REGEXP use:
.PP
- \-\-rexclude '\.bin$'
+Without the trailing slash on source the behaviour is similar to
+what has been demonstrated with upload:
+.nf
+ s3cmd sync s3://test-bucket/backup /tmp/restore/
+.fi
+will download the files as:
+.nf
+ s3://bucket/\fBbackup/file1.ext\fR \-> /tmp/restore/\fBbackup/file1.ext\fR
+ s3://bucket/\fBbackup/dir123/file2.bin\fR \-> /tmp/restore/\fBbackup/dir123/file2.bin\fR
+.fi
+.PP
+All source file names, the bold ones above, are matched against \fBexclude\fR
+rules and those that match are then re\-checked against \fBinclude\fR rules to see
+whether they should be excluded or kept in the source list.
+.PP
+For the purpose of \fB\-\-exclude\fR and \fB\-\-include\fR matching only the
+bold file names above are used. For instance only \fBpath/file1.ext\fR is tested
+against the patterns, not \fI/local/\fBpath/file1.ext\fR
.PP
-to exclude all hidden files and subdirectories (i.e. those whose name begins with dot ".") use GLOB:
+Both \fB\-\-exclude\fR and \fB\-\-include\fR work with shell-style wildcards (a.k.a. GLOB).
+For a greater flexibility s3cmd provides Regular-expression versions of the two exclude options
+named \fB\-\-rexclude\fR and \fB\-\-rinclude\fR.
+The options with ...\fB\-from\fR suffix (eg \-\-rinclude\-from) expect a filename as
+an argument. Each line of such a file is treated as one pattern.
.PP
- \-\-exclude '/.*'
+There is only one set of patterns built from all \fB\-\-(r)exclude(\-from)\fR options
+and similarly for include variant. Any file excluded with eg \-\-exclude can
+be put back with a pattern found in \-\-rinclude\-from list.
.PP
-on the other hand to exclude only hidden files but not hidden subdirectories use REGEXP:
+Run s3cmd with \fB\-\-dry\-run\fR to verify that your rules work as expected.
+Use together with \fB\-\-debug\fR to get detailed information
+about matching file names against exclude and include rules.
.PP
- \-\-rexclude '/\.[^/]*$'
+For example to exclude all files with ".jpg" extension except those beginning with a number use:
.PP
-etc...
+ \-\-exclude '*.jpg' \-\-rinclude '[0-9].*\.jpg'
+
+.SH SEE ALSO
+For the most up to date list of options run
+.B s3cmd \-\-help
+.br
+For more info about usage, examples and other related info visit project homepage at
+.br
+.B http://s3tools.org
.SH AUTHOR
Written by Michal Ludvig <michal@logix.cz>
+.SH CONTACT, SUPPORT
Preferred way to get support is our mailing list:
+.I s3tools\-general@lists.sourceforge.net
.SH REPORTING BUGS
Report bugs to
-.I s3tools\-general@lists.sourceforge.net
+.I s3tools\-bugs@lists.sourceforge.net
.SH COPYRIGHT
-Copyright \(co 2007,2008 Michal Ludvig <http://www.logix.cz/michal>
+Copyright \(co 2007,2008,2009 Michal Ludvig <http://www.logix.cz/michal>
.br
This is free software. You may redistribute copies of it under the terms of
the GNU General Public License version 2 <http://www.gnu.org/licenses/gpl.html>.
There is NO WARRANTY, to the extent permitted by law.
-.SH SEE ALSO
-For the most up to date list of options run
-.B s3cmd \-\-help
-.br
-For more info about usage, examples and other related info visit project homepage at
-.br
-.B http://s3tools.org
-

0 comments on commit 01a8052

Please sign in to comment.
Something went wrong with that request. Please try again.