Skip to content
This repository

add --delay-updates option #34

Merged
merged 1 commit into from about 1 year ago

2 participants

Matt Domsch Michal Ludvig
Matt Domsch
Collaborator

This code adds --delay-updates to the sync command. The source list is split into two parts: files that are new, and files that are being updated. With --delay-updates, the new files are transferred first, and then the updated files are transferred afterwards.

This will keep a yum or deb repo consistent for a longer period of time, only swapping out the updated files (metadata files) at the last possible moment.

Michal Ludvig mludvig merged commit c42c3f2 into from February 18, 2013
Michal Ludvig mludvig closed this February 18, 2013
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Showing 1 unique commit by 1 author.

Mar 01, 2012
Matt Domsch add --delay-updates option c42c3f2
This page is out of date. Refresh to see the latest.
1  S3/Config.py
@@ -54,6 +54,7 @@ class Config(object):
54 54
     ]
55 55
     delete_removed = False
56 56
     _doc['delete_removed'] = "[sync] Remove remote S3 objects when local file has been deleted"
  57
+    delay_updates = False
57 58
     gpg_passphrase = ""
58 59
     gpg_command = ""
59 60
     gpg_encrypt = "%(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
12  S3/FileLists.py
@@ -262,7 +262,7 @@ def _get_filelist_remote(remote_uri, recursive = True):
262 262
                 remote_list[key] = remote_item
263 263
     return remote_list
264 264
 
265  
-def compare_filelists(src_list, dst_list, src_remote, dst_remote):
  265
+def compare_filelists(src_list, dst_list, src_remote, dst_remote, delay_updates = False):
266 266
     def __direction_str(is_remote):
267 267
         return is_remote and "remote" or "local"
268 268
 
@@ -272,6 +272,7 @@ def __direction_str(is_remote):
272 272
     info(u"Verifying attributes...")
273 273
     cfg = Config()
274 274
     exists_list = SortedDict(ignore_case = False)
  275
+    update_list = SortedDict(ignore_case = False)
275 276
 
276 277
     debug("Comparing filelists (direction: %s -> %s)" % (__direction_str(src_remote), __direction_str(dst_remote)))
277 278
     debug("src_list.keys: %s" % src_list.keys())
@@ -331,10 +332,17 @@ def __direction_str(is_remote):
331 332
                 debug(u"IGNR: %s (transfer not needed)" % file)
332 333
                 exists_list[file] = src_list[file]
333 334
                 del(src_list[file])
  335
+	    else:
  336
+	        if delay_updates:
  337
+	            ## Remove from source-list, all that is left there will be transferred
  338
+		    ## Add to update-list to transfer last
  339
+		    debug(u"XFER UPDATE: %s" % file)
  340
+		    update_list[file] = src_list[file]
  341
+		    del(src_list[file])
334 342
 
335 343
             ## Remove from destination-list, all that is left there will be deleted
336 344
             del(dst_list[file])
337 345
 
338  
-    return src_list, dst_list, exists_list
  346
+    return src_list, dst_list, exists_list, update_list
339 347
 
340 348
 # vim:et:ts=4:sts=4:ai
321  s3cmd
@@ -597,9 +597,10 @@ def cmd_sync_remote2remote(args):
597 597
 
598 598
     src_list, exclude_list = filter_exclude_include(src_list)
599 599
 
600  
-    src_list, dst_list, existing_list = compare_filelists(src_list, dst_list, src_remote = True, dst_remote = True)
  600
+    src_list, dst_list, existing_list, update_list = compare_filelists(src_list, dst_list, src_remote = True, dst_remote = True, delay_updates = cfg.delay_updates)
601 601
 
602 602
     src_count = len(src_list)
  603
+    update_count = len(update_list)
603 604
     dst_count = len(dst_list)
604 605
 
605 606
     print(u"Summary: %d source files to copy, %d files at destination to delete" % (src_count, dst_count))
@@ -631,23 +632,29 @@ def cmd_sync_remote2remote(args):
631 632
                 s3.object_delete(uri)
632 633
                 output(u"deleted: '%s'" % uri)
633 634
 
  635
+    def _upload(src_list, seq, src_count):
  636
+        file_list = src_list.keys()
  637
+        file_list.sort()
  638
+        for file in file_list:
  639
+            seq += 1
  640
+            item = src_list[file]
  641
+            src_uri = S3Uri(item['object_uri_str'])
  642
+            dst_uri = S3Uri(item['target_uri'])
  643
+            seq_label = "[%d of %d]" % (seq, src_count)
  644
+            extra_headers = copy(cfg.extra_headers)
  645
+            try:
  646
+                response = s3.object_copy(src_uri, dst_uri, extra_headers)
  647
+                output("File %(src)s copied to %(dst)s" % { "src" : src_uri, "dst" : dst_uri })
  648
+            except S3Error, e:
  649
+                error("File %(src)s could not be copied: %(e)s" % { "src" : src_uri, "e" : e })
  650
+        return seq
  651
+
634 652
     # Perform the synchronization of files
635 653
     timestamp_start = time.time()
636 654
     seq = 0
637  
-    file_list = src_list.keys()
638  
-    file_list.sort()
639  
-    for file in file_list:
640  
-        seq += 1
641  
-        item = src_list[file]
642  
-        src_uri = S3Uri(item['object_uri_str'])
643  
-        dst_uri = S3Uri(item['target_uri'])
644  
-        seq_label = "[%d of %d]" % (seq, src_count)
645  
-        extra_headers = copy(cfg.extra_headers)
646  
-        try:
647  
-            response = s3.object_copy(src_uri, dst_uri, extra_headers)
648  
-            output("File %(src)s copied to %(dst)s" % { "src" : src_uri, "dst" : dst_uri })
649  
-        except S3Error, e:
650  
-            error("File %(src)s could not be copied: %(e)s" % { "src" : src_uri, "e" : e })
  655
+    seq = _upload(src_list, seq, src_count + update_count)
  656
+    seq = _upload(update_list, seq, src_count + update_count)
  657
+
651 658
     total_elapsed = time.time() - timestamp_start
652 659
     outstr = "Done. Copied %d files in %0.1f seconds, %0.2f files/s" % (seq, total_elapsed, seq/total_elapsed)
653 660
     if seq > 0:
@@ -676,27 +683,32 @@ def cmd_sync_remote2local(args):
676 683
 
677 684
     remote_list, exclude_list = filter_exclude_include(remote_list)
678 685
 
679  
-    remote_list, local_list, existing_list = compare_filelists(remote_list, local_list, src_remote = True, dst_remote = False)
  686
+    remote_list, local_list, existing_list, update_list = compare_filelists(remote_list, local_list, src_remote = True, dst_remote = False, delay_updates = cfg.delay_updates)
680 687
 
681 688
     local_count = len(local_list)
682 689
     remote_count = len(remote_list)
  690
+    update_count = len(update_list)
683 691
 
684  
-    info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count, local_count))
685  
-
686  
-    if not os.path.isdir(destination_base):
687  
-        ## We were either given a file name (existing or not) or want STDOUT
688  
-        if remote_count > 1:
689  
-            raise ParameterError("Destination must be a directory when downloading multiple sources.")
690  
-        remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base)
691  
-    else:
692  
-        if destination_base[-1] != os.path.sep:
693  
-            destination_base += os.path.sep
694  
-        for key in remote_list:
695  
-            local_filename = destination_base + key
696  
-            if os.path.sep != "/":
697  
-                local_filename = os.path.sep.join(local_filename.split("/"))
698  
-            remote_list[key]['local_filename'] = deunicodise(local_filename)
  692
+    info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count + update_count, local_count))
699 693
 
  694
+    def _set_local_filename(remote_list, destination_base):
  695
+        if not os.path.isdir(destination_base):
  696
+            ## We were either given a file name (existing or not) or want STDOUT
  697
+            if len(remote_list) > 1:
  698
+                raise ParameterError("Destination must be a directory when downloading multiple sources.")
  699
+            remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base)
  700
+        else:
  701
+            if destination_base[-1] != os.path.sep:
  702
+                destination_base += os.path.sep
  703
+            for key in remote_list:
  704
+                local_filename = destination_base + key
  705
+                if os.path.sep != "/":
  706
+                    local_filename = os.path.sep.join(local_filename.split("/"))
  707
+                remote_list[key]['local_filename'] = deunicodise(local_filename)
  708
+
  709
+    _set_local_filename(remote_list, destination_base)
  710
+    _set_local_filename(update_list, destination_base)
  711
+    
700 712
     if cfg.dry_run:
701 713
         for key in exclude_list:
702 714
             output(u"exclude: %s" % unicodise(key))
@@ -705,6 +717,8 @@ def cmd_sync_remote2local(args):
705 717
                 output(u"delete: %s" % local_list[key]['full_name_unicode'])
706 718
         for key in remote_list:
707 719
             output(u"download: %s -> %s" % (remote_list[key]['object_uri_str'], remote_list[key]['local_filename']))
  720
+        for key in update_list:
  721
+            output(u"download: %s -> %s" % (update_list[key]['object_uri_str'], update_list[key]['local_filename']))
708 722
 
709 723
         warning(u"Exitting now because of --dry-run")
710 724
         return
@@ -714,85 +728,90 @@ def cmd_sync_remote2local(args):
714 728
             os.unlink(local_list[key]['full_name'])
715 729
             output(u"deleted: %s" % local_list[key]['full_name_unicode'])
716 730
 
717  
-    total_size = 0
718  
-    total_elapsed = 0.0
719  
-    timestamp_start = time.time()
720  
-    seq = 0
721  
-    dir_cache = {}
722  
-    file_list = remote_list.keys()
723  
-    file_list.sort()
724  
-    for file in file_list:
725  
-        seq += 1
726  
-        item = remote_list[file]
727  
-        uri = S3Uri(item['object_uri_str'])
728  
-        dst_file = item['local_filename']
729  
-        seq_label = "[%d of %d]" % (seq, remote_count)
730  
-        try:
731  
-            dst_dir = os.path.dirname(dst_file)
732  
-            if not dir_cache.has_key(dst_dir):
733  
-                dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir)
734  
-            if dir_cache[dst_dir] == False:
735  
-                warning(u"%s: destination directory not writable: %s" % (file, dst_dir))
736  
-                continue
  731
+    def _download(remote_list, seq, total, total_size, dir_cache):
  732
+        file_list = remote_list.keys()
  733
+        file_list.sort()
  734
+        for file in file_list:
  735
+            seq += 1
  736
+            item = remote_list[file]
  737
+            uri = S3Uri(item['object_uri_str'])
  738
+            dst_file = item['local_filename']
  739
+            seq_label = "[%d of %d]" % (seq, total)
737 740
             try:
738  
-                open_flags = os.O_CREAT
739  
-                open_flags |= os.O_TRUNC
740  
-                # open_flags |= os.O_EXCL
741  
-
742  
-                debug(u"dst_file=%s" % unicodise(dst_file))
743  
-                # This will have failed should the file exist
744  
-                os.close(os.open(dst_file, open_flags))
745  
-                # Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
746  
-                dst_stream = open(dst_file, "wb")
747  
-                response = s3.object_get(uri, dst_stream, extra_label = seq_label)
748  
-                dst_stream.close()
749  
-                if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
750  
-                    attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
751  
-                    if attrs.has_key('mode'):
752  
-                        os.chmod(dst_file, int(attrs['mode']))
753  
-                    if attrs.has_key('mtime') or attrs.has_key('atime'):
754  
-                        mtime = attrs.has_key('mtime') and int(attrs['mtime']) or int(time.time())
755  
-                        atime = attrs.has_key('atime') and int(attrs['atime']) or int(time.time())
756  
-                        os.utime(dst_file, (atime, mtime))
757  
-                    ## FIXME: uid/gid / uname/gname handling comes here! TODO
758  
-            except OSError, e:
759  
-                try: dst_stream.close()
760  
-                except: pass
761  
-                if e.errno == errno.EEXIST:
762  
-                    warning(u"%s exists - not overwriting" % (dst_file))
763  
-                    continue
764  
-                if e.errno in (errno.EPERM, errno.EACCES):
765  
-                    warning(u"%s not writable: %s" % (dst_file, e.strerror))
  741
+                dst_dir = os.path.dirname(dst_file)
  742
+                if not dir_cache.has_key(dst_dir):
  743
+                    dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir)
  744
+                if dir_cache[dst_dir] == False:
  745
+                    warning(u"%s: destination directory not writable: %s" % (file, dst_dir))
766 746
                     continue
767  
-                if e.errno == errno.EISDIR:
768  
-                    warning(u"%s is a directory - skipping over" % dst_file)
  747
+                try:
  748
+                    open_flags = os.O_CREAT
  749
+                    open_flags |= os.O_TRUNC
  750
+                    # open_flags |= os.O_EXCL
  751
+
  752
+                    debug(u"dst_file=%s" % unicodise(dst_file))
  753
+                    # This will have failed should the file exist
  754
+                    os.close(os.open(dst_file, open_flags))
  755
+                    # Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
  756
+                    dst_stream = open(dst_file, "wb")
  757
+                    response = s3.object_get(uri, dst_stream, extra_label = seq_label)
  758
+                    dst_stream.close()
  759
+                    if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
  760
+                        attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
  761
+                        if attrs.has_key('mode'):
  762
+                            os.chmod(dst_file, int(attrs['mode']))
  763
+                        if attrs.has_key('mtime') or attrs.has_key('atime'):
  764
+                            mtime = attrs.has_key('mtime') and int(attrs['mtime']) or int(time.time())
  765
+                            atime = attrs.has_key('atime') and int(attrs['atime']) or int(time.time())
  766
+                            os.utime(dst_file, (atime, mtime))
  767
+                        ## FIXME: uid/gid / uname/gname handling comes here! TODO
  768
+                except OSError, e:
  769
+                    try: dst_stream.close()
  770
+                    except: pass
  771
+                    if e.errno == errno.EEXIST:
  772
+                        warning(u"%s exists - not overwriting" % (dst_file))
  773
+                        continue
  774
+                    if e.errno in (errno.EPERM, errno.EACCES):
  775
+                        warning(u"%s not writable: %s" % (dst_file, e.strerror))
  776
+                        continue
  777
+                    if e.errno == errno.EISDIR:
  778
+                        warning(u"%s is a directory - skipping over" % dst_file)
  779
+                        continue
  780
+                    raise e
  781
+                except KeyboardInterrupt:
  782
+                    try: dst_stream.close()
  783
+                    except: pass
  784
+                    warning(u"Exiting after keyboard interrupt")
  785
+                    return
  786
+                except Exception, e:
  787
+                    try: dst_stream.close()
  788
+                    except: pass
  789
+                    error(u"%s: %s" % (file, e))
769 790
                     continue
770  
-                raise e
771  
-            except KeyboardInterrupt:
  791
+                # We have to keep repeating this call because
  792
+                # Python 2.4 doesn't support try/except/finally
  793
+                # construction :-(
772 794
                 try: dst_stream.close()
773 795
                 except: pass
774  
-                warning(u"Exiting after keyboard interrupt")
775  
-                return
776  
-            except Exception, e:
777  
-                try: dst_stream.close()
778  
-                except: pass
779  
-                error(u"%s: %s" % (file, e))
  796
+            except S3DownloadError, e:
  797
+                error(u"%s: download failed too many times. Skipping that file." % file)
780 798
                 continue
781  
-            # We have to keep repeating this call because
782  
-            # Python 2.4 doesn't support try/except/finally
783  
-            # construction :-(
784  
-            try: dst_stream.close()
785  
-            except: pass
786  
-        except S3DownloadError, e:
787  
-            error(u"%s: download failed too many times. Skipping that file." % file)
788  
-            continue
789  
-        speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
790  
-        if not Config().progress_meter:
791  
-            output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
792  
-                (uri, unicodise(dst_file), response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
793  
-                seq_label))
794  
-        total_size += response["size"]
  799
+            speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
  800
+            if not Config().progress_meter:
  801
+                output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
  802
+                    (uri, unicodise(dst_file), response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
  803
+                    seq_label))
  804
+            total_size += response["size"]
  805
+        return seq, total_size
795 806
 
  807
+    total_size = 0
  808
+    total_elapsed = 0.0
  809
+    timestamp_start = time.time()
  810
+    dir_cache = {}
  811
+    seq = 0
  812
+    seq, total_size = _download(remote_list, seq, remote_count + update_count, total_size, dir_cache)
  813
+    seq, total_size = _download(update_list, seq, remote_count + update_count, total_size, dir_cache)
  814
+    
796 815
     total_elapsed = time.time() - timestamp_start
797 816
     speed_fmt = formatSize(total_size/total_elapsed, human_readable = True, floating_point = True)
798 817
 
@@ -858,30 +877,37 @@ def cmd_sync_local2remote(args):
858 877
     info(u"Found %d local files, %d remote files" % (local_count, remote_count))
859 878
 
860 879
     local_list, exclude_list = filter_exclude_include(local_list)
861  
-
  880
+        
862 881
     if single_file_local and len(local_list) == 1 and len(remote_list) == 1:
863 882
         ## Make remote_key same as local_key for comparison if we're dealing with only one file
864 883
         remote_list_entry = remote_list[remote_list.keys()[0]]
865 884
         # Flush remote_list, by the way
866 885
         remote_list = { local_list.keys()[0] : remote_list_entry }
867 886
 
868  
-    local_list, remote_list, existing_list = compare_filelists(local_list, remote_list, src_remote = False, dst_remote = True)
  887
+    local_list, remote_list, existing_list, update_list = compare_filelists(local_list, remote_list, src_remote = False, dst_remote = True, delay_updates = cfg.delay_updates)
  888
+
869 889
 
870 890
     local_count = len(local_list)
  891
+    update_count = len(update_list)
871 892
     remote_count = len(remote_list)
872 893
 
873  
-    info(u"Summary: %d local files to upload, %d remote files to delete" % (local_count, remote_count))
  894
+    info(u"Summary: %d local files to upload, %d remote files to delete" % (local_count + update_count, remote_count))
874 895
 
875  
-    if local_count > 0:
876  
-        ## Populate 'remote_uri' only if we've got something to upload
877  
-        if not destination_base.endswith("/"):
878  
-            if not single_file_local:
879  
-                raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).")
880  
-            local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base)
881  
-        else:
882  
-            for key in local_list:
883  
-                local_list[key]['remote_uri'] = unicodise(destination_base + key)
884 896
 
  897
+    def _set_remote_uri(local_list, destination_base, single_file_local):
  898
+        if len(local_list) > 0:
  899
+            ## Populate 'remote_uri' only if we've got something to upload
  900
+            if not destination_base.endswith("/"):
  901
+                if not single_file_local:
  902
+                    raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).")
  903
+                local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base)
  904
+            else:
  905
+                for key in local_list:
  906
+                    local_list[key]['remote_uri'] = unicodise(destination_base + key)
  907
+
  908
+    _set_remote_uri(local_list, destination_base, single_file_local)
  909
+    _set_remote_uri(update_list, destination_base, single_file_local)
  910
+    
885 911
     if cfg.dry_run:
886 912
         for key in exclude_list:
887 913
             output(u"exclude: %s" % unicodise(key))
@@ -890,6 +916,8 @@ def cmd_sync_local2remote(args):
890 916
                 output(u"delete: %s" % remote_list[key]['object_uri_str'])
891 917
         for key in local_list:
892 918
             output(u"upload: %s -> %s" % (local_list[key]['full_name_unicode'], local_list[key]['remote_uri']))
  919
+        for key in update_list:
  920
+            output(u"upload: %s -> %s" % (update_list[key]['full_name_unicode'], update_list[key]['remote_uri']))
893 921
 
894 922
         warning(u"Exitting now because of --dry-run")
895 923
         return
@@ -904,36 +932,40 @@ def cmd_sync_local2remote(args):
904 932
     total_size = 0
905 933
     total_elapsed = 0.0
906 934
     timestamp_start = time.time()
907  
-    seq = 0
908  
-    file_list = local_list.keys()
909  
-    file_list.sort()
910  
-    for file in file_list:
911  
-        seq += 1
912  
-        item = local_list[file]
913  
-        src = item['full_name']
914  
-        uri = S3Uri(item['remote_uri'])
915  
-        seq_label = "[%d of %d]" % (seq, local_count)
916  
-        extra_headers = copy(cfg.extra_headers)
917  
-        try:
918  
-            if cfg.preserve_attrs:
919  
-                attr_header = _build_attr_header(src)
920  
-                debug(u"attr_header: %s" % attr_header)
921  
-                extra_headers.update(attr_header)
922  
-            response = s3.object_put(src, uri, extra_headers, extra_label = seq_label)
923  
-        except InvalidFileError, e:
924  
-            warning(u"File can not be uploaded: %s" % e)
925  
-            continue
926  
-        except S3UploadError, e:
927  
-            error(u"%s: upload failed too many times. Skipping that file." % item['full_name_unicode'])
928  
-            continue
929  
-        speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
930  
-        if not cfg.progress_meter:
931  
-            output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
932  
-                (item['full_name_unicode'], uri, response["size"], response["elapsed"],
933  
-                speed_fmt[0], speed_fmt[1], seq_label))
934  
-        total_size += response["size"]
935  
-        uploaded_objects_list.append(uri.object())
936 935
 
  936
+    def _upload(local_list, seq, total, total_size):
  937
+        file_list = local_list.keys()
  938
+        file_list.sort()
  939
+        for file in file_list:
  940
+            seq += 1
  941
+            item = local_list[file]
  942
+            src = item['full_name']
  943
+            uri = S3Uri(item['remote_uri'])
  944
+            seq_label = "[%d of %d]" % (seq, total)
  945
+            extra_headers = copy(cfg.extra_headers)
  946
+            try:
  947
+                if cfg.preserve_attrs:
  948
+                    attr_header = _build_attr_header(src)
  949
+                    debug(u"attr_header: %s" % attr_header)
  950
+                    extra_headers.update(attr_header)
  951
+                response = s3.object_put(src, uri, extra_headers, extra_label = seq_label)
  952
+            except InvalidFileError, e:
  953
+                warning(u"File can not be uploaded: %s" % e)
  954
+                continue
  955
+            except S3UploadError, e:
  956
+                error(u"%s: upload failed too many times. Skipping that file." % item['full_name_unicode'])
  957
+                continue
  958
+            speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
  959
+            if not cfg.progress_meter:
  960
+                output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
  961
+                    (item['full_name_unicode'], uri, response["size"], response["elapsed"],
  962
+                    speed_fmt[0], speed_fmt[1], seq_label))
  963
+            total_size += response["size"]
  964
+            uploaded_objects_list.append(uri.object())
  965
+        return seq, total_size
  966
+
  967
+    n, total_size = _upload(local_list, 0, local_count, total_size)
  968
+    n, total_size = _upload(update_list, n, local_count, total_size)
937 969
     total_elapsed = time.time() - timestamp_start
938 970
     total_speed = total_elapsed and total_size/total_elapsed or 0.0
939 971
     speed_fmt = formatSize(total_speed, human_readable = True, floating_point = True)
@@ -1499,6 +1531,7 @@ def main():
1499 1531
 
1500 1532
     optparser.add_option(      "--delete-removed", dest="delete_removed", action="store_true", help="Delete remote objects with no corresponding local file [sync]")
1501 1533
     optparser.add_option(      "--no-delete-removed", dest="delete_removed", action="store_false", help="Don't delete remote objects.")
  1534
+    optparser.add_option(      "--delay-updates", dest="delay_updates", action="store_true", help="Put all updated files into place at end [sync]")
1502 1535
     optparser.add_option("-p", "--preserve", dest="preserve_attrs", action="store_true", help="Preserve filesystem attributes (mode, ownership, timestamps). Default for [sync] command.")
1503 1536
     optparser.add_option(      "--no-preserve", dest="preserve_attrs", action="store_false", help="Don't store FS attributes")
1504 1537
     optparser.add_option(      "--exclude", dest="exclude", action="append", metavar="GLOB", help="Filenames and paths matching GLOB will be excluded from sync")
3  s3cmd.1
@@ -186,6 +186,9 @@ Delete remote objects with no corresponding local file
186 186
 \fB\-\-no\-delete\-removed\fR
187 187
 Don't delete remote objects.
188 188
 .TP
  189
+\fB\-\-delay\-updates\fR
  190
+Put all updated files into place at end [sync]
  191
+.TP
189 192
 \fB\-p\fR, \fB\-\-preserve\fR
190 193
 Preserve filesystem attributes (mode, ownership,
191 194
 timestamps). Default for [sync] command.
Commit_comment_tip

Tip: You can add notes to lines in a file. Hover to the left of a line to make a note

Something went wrong with that request. Please try again.