* s3cmd, S3/AccessLog.py, ...: Added [accesslog] command.

git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@402 830e0280-6d2a-0410-9c65-932aecc39d9d
commit cb0bbaef88a467be38393569597cd98028db5557 (parent d852cbb)
Author: Michal Ludvig (mludvig)
ChangeLog (4 lines changed)
@@ -1,3 +1,7 @@
  1 +2010-03-19 Michal Ludvig <mludvig@logix.net.nz>
  2 +
  3 + * s3cmd, S3/AccessLog.py, ...: Added [accesslog] command.
  4 +
1 5 2009-12-10 Michal Ludvig <mludvig@logix.net.nz>
2 6
3 7 * s3cmd: Path separator conversion on Windows hosts.
NEWS (4 lines changed)
@@ -1,3 +1,7 @@
  1 +s3cmd 0.9.9.92 - ???
  2 +==============
  3 +* Added [accesslog] command. (needs manpage!)
  4 +
1 5 s3cmd 0.9.9.91 - 2009-10-08
2 6 ==============
3 7 * Fixed invalid reference to a variable in failed upload handling.
S3/ACL.py (19 lines changed)
@@ -3,7 +3,7 @@
3 3 ## http://www.logix.cz/michal
4 4 ## License: GPL Version 2
5 5
6   -from Utils import *
  6 +from Utils import getTreeFromXml
7 7
8 8 try:
9 9 import xml.etree.ElementTree as ET
@@ -12,6 +12,7 @@
12 12
13 13 class Grantee(object):
14 14 ALL_USERS_URI = "http://acs.amazonaws.com/groups/global/AllUsers"
  15 + LOG_DELIVERY_URI = "http://acs.amazonaws.com/groups/s3/LogDelivery"
15 16
16 17 def __init__(self):
17 18 self.xsi_type = None
@@ -53,6 +54,17 @@ def __init__(self):
53 54 self.name = Grantee.ALL_USERS_URI
54 55 self.permission = "READ"
55 56
  57 +class GranteeLogDelivery(Grantee):
  58 + def __init__(self, permission):
  59 + """
  60 + permission must be either READ_ACP or WRITE
  61 + """
  62 + Grantee.__init__(self)
  63 + self.xsi_type = "Group"
  64 + self.tag = "URI"
  65 + self.name = Grantee.LOG_DELIVERY_URI
  66 + self.permission = permission
  67 +
56 68 class ACL(object):
57 69 EMPTY_ACL = "<AccessControlPolicy><Owner><ID></ID></Owner><AccessControlList></AccessControlList></AccessControlPolicy>"
58 70
@@ -109,11 +121,14 @@ def isAnonRead(self):
109 121
110 122 def grantAnonRead(self):
111 123 if not self.isAnonRead():
112   - self.grantees.append(GranteeAnonRead())
  124 + self.appendGrantee(GranteeAnonRead())
113 125
114 126 def revokeAnonRead(self):
115 127 self.grantees = [g for g in self.grantees if not g.isAnonRead()]
116 128
  129 + def appendGrantee(self, grantee):
  130 + self.grantees.append(grantee)
  131 +
117 132 def __str__(self):
118 133 tree = getTreeFromXml(ACL.EMPTY_ACL)
119 134 tree.attrib['xmlns'] = "http://s3.amazonaws.com/doc/2006-03-01/"
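The new GranteeLogDelivery grantee models Amazon's log-delivery group, which must hold READ_ACP and WRITE on whichever bucket receives the logs. A minimal sketch of how it plugs into the ACL machinery, mirroring the new S3.set_accesslog_acl() further down; the acl object is assumed to come from S3.get_acl() on the target bucket, exactly as that method obtains it:

    from S3.ACL import GranteeLogDelivery

    # acl is assumed to be the target bucket's current ACL,
    # e.g. acl = s3.get_acl(target_bucket_uri)
    acl.appendGrantee(GranteeLogDelivery("READ_ACP"))   # group may read the bucket's ACL
    acl.appendGrantee(GranteeLogDelivery("WRITE"))      # group may write log objects into the bucket
    updated_policy = str(acl)                           # serialised <AccessControlPolicy> for a PUT ?acl request

Both grantees carry Grantee.LOG_DELIVERY_URI ("http://acs.amazonaws.com/groups/s3/LogDelivery") as their grantee URI, which is the group S3 delivers access logs as.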
S3/AccessLog.py (90 lines changed, new file)
@@ -0,0 +1,90 @@
  1 +## Amazon S3 - Access Control List representation
  2 +## Author: Michal Ludvig <michal@logix.cz>
  3 +## http://www.logix.cz/michal
  4 +## License: GPL Version 2
  5 +
  6 +import S3Uri
  7 +from Exceptions import ParameterError
  8 +from Utils import getTreeFromXml
  9 +from ACL import GranteeAnonRead
  10 +
  11 +try:
  12 + import xml.etree.ElementTree as ET
  13 +except ImportError:
  14 + import elementtree.ElementTree as ET
  15 +
  16 +__all__ = []
  17 +class AccessLog(object):
  18 + LOG_DISABLED = "<BucketLoggingStatus></BucketLoggingStatus>"
  19 + LOG_TEMPLATE = "<LoggingEnabled><TargetBucket></TargetBucket><TargetPrefix></TargetPrefix></LoggingEnabled>"
  20 +
  21 + def __init__(self, xml = None):
  22 + if not xml:
  23 + xml = self.LOG_DISABLED
  24 + self.tree = getTreeFromXml(xml)
  25 + self.tree.attrib['xmlns'] = "http://doc.s3.amazonaws.com/2006-03-01"
  26 +
  27 + def isLoggingEnabled(self):
  28 + return bool(self.tree.find(".//LoggingEnabled"))
  29 +
  30 + def disableLogging(self):
  31 + el = self.tree.find(".//LoggingEnabled")
  32 + if el:
  33 + self.tree.remove(el)
  34 +
  35 + def enableLogging(self, target_prefix_uri):
  36 + el = self.tree.find(".//LoggingEnabled")
  37 + if not el:
  38 + el = getTreeFromXml(self.LOG_TEMPLATE)
  39 + self.tree.append(el)
  40 + el.find(".//TargetBucket").text = target_prefix_uri.bucket()
  41 + el.find(".//TargetPrefix").text = target_prefix_uri.object()
  42 +
  43 + def targetPrefix(self):
  44 + if self.isLoggingEnabled():
  45 + el = self.tree.find(".//LoggingEnabled")
  46 + target_prefix = "s3://%s/%s" % (
  47 + self.tree.find(".//LoggingEnabled//TargetBucket").text,
  48 + self.tree.find(".//LoggingEnabled//TargetPrefix").text)
  49 + return S3Uri.S3Uri(target_prefix)
  50 + else:
  51 + return ""
  52 +
  53 + def setAclPublic(self, acl_public):
  54 + le = self.tree.find(".//LoggingEnabled")
  55 + if not le:
  56 + raise ParameterError("Logging not enabled, can't set default ACL for logs")
  57 + tg = le.find(".//TargetGrants")
  58 + if not acl_public:
  59 + if not tg:
  60 + ## All good, it's not been there
  61 + return
  62 + else:
  63 + le.remove(tg)
  64 + else: # acl_public == True
  65 + anon_read = GranteeAnonRead().getElement()
  66 + if not tg:
  67 + tg = ET.SubElement(le, "TargetGrants")
  68 + ## What if TargetGrants already exists? We should check if
  69 + ## AnonRead is there before appending a new one. Later...
  70 + tg.append(anon_read)
  71 +
  72 + def isAclPublic(self):
  73 + raise NotImplementedError()
  74 +
  75 + def __str__(self):
  76 + return ET.tostring(self.tree)
  77 +__all__.append("AccessLog")
  78 +
  79 +if __name__ == "__main__":
  80 + from S3Uri import S3Uri
  81 + log = AccessLog()
  82 + print log
  83 + log.enableLogging(S3Uri("s3://targetbucket/prefix/log-"))
  84 + print log
  85 + log.setAclPublic(True)
  86 + print log
  87 + log.setAclPublic(False)
  88 + print log
  89 + log.disableLogging()
  90 + print log
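The self-test above only exercises the write path. On the read side the same class wraps the body of a GET ?logging response, which is how the new S3.get_accesslog() uses it; a rough sketch, with response_xml standing in for the XML returned by S3:

    from S3.AccessLog import AccessLog

    accesslog = AccessLog(response_xml)      # response_xml: body of GET ?logging (placeholder)
    if accesslog.isLoggingEnabled():
        # targetPrefix() reassembles TargetBucket/TargetPrefix into an S3Uri
        print "Logs are delivered to %s" % accesslog.targetPrefix().uri()
    else:
        print "Access logging is disabled"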
S3/Config.py (2 lines changed)
@@ -29,6 +29,7 @@ class Config(object):
29 29 human_readable_sizes = False
30 30 extra_headers = SortedDict(ignore_case = True)
31 31 force = False
  32 + enable = None
32 33 get_continue = False
33 34 skip_existing = False
34 35 recursive = False
@@ -69,6 +70,7 @@ class Config(object):
69 70 debug_include = {}
70 71 encoding = "utf-8"
71 72 urlencoding_mode = "normal"
  73 + log_target_prefix = ""
72 74
73 75 ## Creating a singleton
74 76 def __new__(self, configfile = None):
S3/S3.py (42 lines changed)
@@ -9,6 +9,7 @@
9 9 import httplib
10 10 import logging
11 11 import mimetypes
  12 +import re
12 13 from logging import debug, info, warning, error
13 14 from stat import ST_SIZE
14 15
@@ -22,8 +23,11 @@
22 23 from BidirMap import BidirMap
23 24 from Config import Config
24 25 from Exceptions import *
25   -from ACL import ACL
  26 +from ACL import ACL, GranteeLogDelivery
  27 +from AccessLog import AccessLog
  28 +from S3Uri import S3Uri
26 29
  30 +__all__ = []
27 31 class S3Request(object):
28 32 def __init__(self, s3, method_string, resource, headers, params = {}):
29 33 self.s3 = s3
@@ -322,6 +326,41 @@ def set_acl(self, uri, acl):
322 326 response = self.send_request(request, body)
323 327 return response
324 328
  329 + def get_accesslog(self, uri):
  330 + request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?logging")
  331 + response = self.send_request(request)
  332 + accesslog = AccessLog(response['data'])
  333 + return accesslog
  334 +
  335 + def set_accesslog_acl(self, uri):
  336 + acl = self.get_acl(uri)
  337 + debug("Current ACL(%s): %s" % (uri.uri(), str(acl)))
  338 + acl.appendGrantee(GranteeLogDelivery("READ_ACP"))
  339 + acl.appendGrantee(GranteeLogDelivery("WRITE"))
  340 + debug("Updated ACL(%s): %s" % (uri.uri(), str(acl)))
  341 + self.set_acl(uri, acl)
  342 +
  343 + def set_accesslog(self, uri, enable, log_target_prefix_uri = None, acl_public = False):
  344 + request = self.create_request("BUCKET_CREATE", bucket = uri.bucket(), extra = "?logging")
  345 + accesslog = AccessLog()
  346 + if enable:
  347 + accesslog.enableLogging(log_target_prefix_uri)
  348 + accesslog.setAclPublic(acl_public)
  349 + else:
  350 + accesslog.disableLogging()
  351 + body = str(accesslog)
  352 + debug(u"set_accesslog(%s): accesslog-xml: %s" % (uri, body))
  353 + try:
  354 + response = self.send_request(request, body)
  355 + except S3Error, e:
  356 + if e.info['Code'] == "InvalidTargetBucketForLogging":
  357 + info("Setting up log-delivery ACL for target bucket.")
  358 + self.set_accesslog_acl(S3Uri("s3://%s" % log_target_prefix_uri.bucket()))
  359 + response = self.send_request(request, body)
  360 + else:
  361 + raise
  362 + return accesslog, response
  363 +
325 364 ## Low level methods
326 365 def urlencode_string(self, string, urlencoding_mode = None):
327 366 if type(string) == unicode:
@@ -720,3 +759,4 @@ def check_bucket_name_dns_conformity(bucket):
720 759 return S3.check_bucket_name(bucket, dns_strict = True)
721 760 except ParameterError:
722 761 return False
  762 +__all__.append("S3")
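get_accesslog() and set_accesslog() are the library-level API behind the new [accesslog] command. A minimal sketch of driving them directly; the config path and bucket names are placeholders, and valid credentials are assumed:

    from S3.Config import Config
    from S3.S3 import S3
    from S3.S3Uri import S3Uri

    cfg = Config("/path/to/.s3cfg")                      # placeholder path; s3cmd passes the user's config file
    s3 = S3(cfg)

    bucket = S3Uri("s3://mybucket")                      # placeholder bucket
    target = S3Uri("s3://logs-bucket/mybucket/log-")     # placeholder log destination

    # Enable logging. On InvalidTargetBucketForLogging the method grants the
    # LogDelivery group on the target bucket (set_accesslog_acl) and retries once.
    accesslog, response = s3.set_accesslog(bucket, enable = True,
                                           log_target_prefix_uri = target,
                                           acl_public = False)

    # Read the current status back
    print s3.get_accesslog(bucket).isLoggingEnabled()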
S3/S3Uri.py (4 lines changed)
@@ -8,7 +8,7 @@
8 8 import sys
9 9 from BidirMap import BidirMap
10 10 from logging import debug
11   -from S3 import S3
  11 +import S3
12 12 from Utils import unicodise
13 13
14 14 class S3Uri(object):
@@ -73,7 +73,7 @@ def uri(self):
73 73 return "/".join(["s3:/", self._bucket, self._object])
74 74
75 75 def is_dns_compatible(self):
76   - return S3.check_bucket_name_dns_conformity(self._bucket)
  76 + return S3.S3.check_bucket_name_dns_conformity(self._bucket)
77 77
78 78 def public_url(self):
79 79 if self.is_dns_compatible():
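The switch from "from S3 import S3" to a plain "import S3" appears to be forced by the new module-level "from S3Uri import S3Uri" in S3/S3.py above: with class-level imports in both directions the two modules form an import cycle, and whichever one is loaded second would look up a class that does not exist yet. Binding only the module and resolving the class at call time sidesteps that; roughly:

    # At import time, the old pattern would now fail:
    #   S3.py    -> from S3Uri import S3Uri     (starts loading S3Uri.py)
    #   S3Uri.py -> from S3 import S3           (S3.py only half-initialised -> ImportError)
    # The new pattern binds the module and defers the attribute lookup:
    import S3
    S3.S3.check_bucket_name_dns_conformity("my-bucket")   # hypothetical bucket name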
S3/Utils.py (29 lines changed)
@@ -29,6 +29,7 @@
29 29 import elementtree.ElementTree as ET
30 30 from xml.parsers.expat import ExpatError
31 31
  32 +__all__ = []
32 33 def parseNodes(nodes):
33 34 ## WARNING: Ignores text nodes from mixed xml/text.
34 35 ## For instance <tag1>some text<tag2>other text</tag2></tag1>
@@ -44,6 +45,7 @@ def parseNodes(nodes):
44 45 retval_item[name] = node.findtext(".//%s" % child.tag)
45 46 retval.append(retval_item)
46 47 return retval
  48 +__all__.append("parseNodes")
47 49
48 50 def stripNameSpace(xml):
49 51 """
@@ -56,6 +58,7 @@ def stripNameSpace(xml):
56 58 else:
57 59 xmlns = None
58 60 return xml, xmlns
  61 +__all__.append("stripNameSpace")
59 62
60 63 def getTreeFromXml(xml):
61 64 xml, xmlns = stripNameSpace(xml)
@@ -67,11 +70,13 @@ def getTreeFromXml(xml):
67 70 except ExpatError, e:
68 71 error(e)
69 72 raise Exceptions.ParameterError("Bucket contains invalid filenames. Please run: s3cmd fixbucket s3://your-bucket/")
  73 +__all__.append("getTreeFromXml")
70 74
71 75 def getListFromXml(xml, node):
72 76 tree = getTreeFromXml(xml)
73 77 nodes = tree.findall('.//%s' % (node))
74 78 return parseNodes(nodes)
  79 +__all__.append("getListFromXml")
75 80
76 81 def getDictFromTree(tree):
77 82 ret_dict = {}
@@ -86,6 +91,7 @@ def getDictFromTree(tree):
86 91 else:
87 92 ret_dict[child.tag] = child.text or ""
88 93 return ret_dict
  94 +__all__.append("getDictFromTree")
89 95
90 96 def getTextFromXml(xml, xpath):
91 97 tree = getTreeFromXml(xml)
@@ -93,15 +99,18 @@ def getTextFromXml(xml, xpath):
93 99 return tree.text
94 100 else:
95 101 return tree.findtext(xpath)
  102 +__all__.append("getTextFromXml")
96 103
97 104 def getRootTagName(xml):
98 105 tree = getTreeFromXml(xml)
99 106 return tree.tag
  107 +__all__.append("getRootTagName")
100 108
101 109 def xmlTextNode(tag_name, text):
102 110 el = ET.Element(tag_name)
103 111 el.text = unicode(text)
104 112 return el
  113 +__all__.append("xmlTextNode")
105 114
106 115 def appendXmlTextNode(tag_name, text, parent):
107 116 """
@@ -111,22 +120,27 @@ def appendXmlTextNode(tag_name, text, parent):
111 120 Returns the newly created Node.
112 121 """
113 122 parent.append(xmlTextNode(tag_name, text))
  123 +__all__.append("appendXmlTextNode")
114 124
115 125 def dateS3toPython(date):
116 126 date = re.compile("(\.\d*)?Z").sub(".000Z", date)
117 127 return time.strptime(date, "%Y-%m-%dT%H:%M:%S.000Z")
  128 +__all__.append("dateS3toPython")
118 129
119 130 def dateS3toUnix(date):
120 131 ## FIXME: This should be timezone-aware.
121 132 ## Currently the argument to strptime() is GMT but mktime()
122 133 ## treats it as "localtime". Anyway...
123 134 return time.mktime(dateS3toPython(date))
  135 +__all__.append("dateS3toUnix")
124 136
125 137 def dateRFC822toPython(date):
126 138 return rfc822.parsedate(date)
  139 +__all__.append("dateRFC822toPython")
127 140
128 141 def dateRFC822toUnix(date):
129 142 return time.mktime(dateRFC822toPython(date))
  143 +__all__.append("dateRFC822toUnix")
130 144
131 145 def formatSize(size, human_readable = False, floating_point = False):
132 146 size = floating_point and float(size) or int(size)
@@ -139,16 +153,18 @@ def formatSize(size, human_readable = False, floating_point = False):
139 153 return (size, coeff)
140 154 else:
141 155 return (size, "")
  156 +__all__.append("formatSize")
142 157
143 158 def formatDateTime(s3timestamp):
144 159 return time.strftime("%Y-%m-%d %H:%M", dateS3toPython(s3timestamp))
  160 +__all__.append("formatDateTime")
145 161
146 162 def convertTupleListToDict(list):
147 163 retval = {}
148 164 for tuple in list:
149 165 retval[tuple[0]] = tuple[1]
150 166 return retval
151   -
  167 +__all__.append("convertTupleListToDict")
152 168
153 169 _rnd_chars = string.ascii_letters+string.digits
154 170 _rnd_chars_len = len(_rnd_chars)
@@ -158,6 +174,7 @@ def rndstr(len):
158 174 retval += _rnd_chars[random.randint(0, _rnd_chars_len-1)]
159 175 len -= 1
160 176 return retval
  177 +__all__.append("rndstr")
161 178
162 179 def mktmpsomething(prefix, randchars, createfunc):
163 180 old_umask = os.umask(0077)
@@ -175,13 +192,16 @@ def mktmpsomething(prefix, randchars, createfunc):
175 192
176 193 os.umask(old_umask)
177 194 return dirname
  195 +__all__.append("mktmpsomething")
178 196
179 197 def mktmpdir(prefix = "/tmp/tmpdir-", randchars = 10):
180 198 return mktmpsomething(prefix, randchars, os.mkdir)
  199 +__all__.append("mktmpdir")
181 200
182 201 def mktmpfile(prefix = "/tmp/tmpfile-", randchars = 20):
183 202 createfunc = lambda filename : os.close(os.open(filename, os.O_CREAT | os.O_EXCL))
184 203 return mktmpsomething(prefix, randchars, createfunc)
  204 +__all__.append("mktmpfile")
185 205
186 206 def hash_file_md5(filename):
187 207 h = md5()
@@ -194,6 +214,7 @@ def hash_file_md5(filename):
194 214 h.update(data)
195 215 f.close()
196 216 return h.hexdigest()
  217 +__all__.append("hash_file_md5")
197 218
198 219 def mkdir_with_parents(dir_name):
199 220 """
@@ -220,6 +241,7 @@ def mkdir_with_parents(dir_name):
220 241 warning("%s: %s" % (cur_dir, e))
221 242 return False
222 243 return True
  244 +__all__.append("mkdir_with_parents")
223 245
224 246 def unicodise(string, encoding = None, errors = "replace"):
225 247 """
@@ -236,6 +258,7 @@ def unicodise(string, encoding = None, errors = "replace"):
236 258 return string.decode(encoding, errors)
237 259 except UnicodeDecodeError:
238 260 raise UnicodeDecodeError("Conversion to unicode failed: %r" % string)
  261 +__all__.append("unicodise")
239 262
240 263 def deunicodise(string, encoding = None, errors = "replace"):
241 264 """
@@ -253,6 +276,7 @@ def deunicodise(string, encoding = None, errors = "replace"):
253 276 return string.encode(encoding, errors)
254 277 except UnicodeEncodeError:
255 278 raise UnicodeEncodeError("Conversion from unicode failed: %r" % string)
  279 +__all__.append("deunicodise")
256 280
257 281 def unicodise_safe(string, encoding = None):
258 282 """
@@ -261,6 +285,7 @@ def unicodise_safe(string, encoding = None):
261 285 """
262 286
263 287 return unicodise(deunicodise(string, encoding), encoding).replace(u'\ufffd', '?')
  288 +__all__.append("unicodise_safe")
264 289
265 290 def replace_nonprintables(string):
266 291 """
@@ -284,9 +309,11 @@ def replace_nonprintables(string):
284 309 if modified and Config.Config().urlencoding_mode != "fixbucket":
285 310 warning("%d non-printable characters replaced in: %s" % (modified, new_string))
286 311 return new_string
  312 +__all__.append("replace_nonprintables")
287 313
288 314 def sign_string(string_to_sign):
289 315 #debug("string_to_sign: %s" % string_to_sign)
290 316 signature = base64.encodestring(hmac.new(Config.Config().secret_key, string_to_sign, sha1).digest()).strip()
291 317 #debug("signature: %s" % signature)
292 318 return signature
  319 +__all__.append("sign_string")
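The __all__ list added throughout Utils.py controls what a wildcard import re-exports. Without it, "from Utils import *" (which S3/ACL.py used until this commit, see above) also dragged in everything Utils itself imports, such as os, time, re and the ElementTree module; with the explicit list only the named helpers leak out. A small sketch of the effect:

    # Only names listed in S3.Utils.__all__ are pulled in by the wildcard now
    from S3.Utils import *

    tree = getTreeFromXml("<Tag>hello</Tag>")   # exported via __all__
    print tree.text                             # -> hello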
s3cmd (40 lines changed)
@@ -1122,6 +1122,27 @@ def cmd_setacl(args):
1122 1122 if retsponse['status'] == 200:
1123 1123 output(u"%s: ACL set to %s %s" % (uri, set_to_acl, seq_label))
1124 1124
  1125 +def cmd_accesslog(args):
  1126 + s3 = S3(cfg)
  1127 + bucket_uri = S3Uri(args.pop())
  1128 + if bucket_uri.object():
  1129 + raise ParameterError("Only bucket name is required for [accesslog] command")
  1130 + if cfg.enable == True:
  1131 + log_target_prefix_uri = S3Uri(cfg.log_target_prefix)
  1132 + if log_target_prefix_uri.type != "s3":
  1133 + raise ParameterError("--log-target-prefix must be a S3 URI")
  1134 + accesslog, response = s3.set_accesslog(bucket_uri, enable = True, log_target_prefix_uri = log_target_prefix_uri, acl_public = cfg.acl_public)
  1135 + elif cfg.enable == False:
  1136 + accesslog, response = s3.set_accesslog(bucket_uri, enable = False)
  1137 + else: # cfg.enable == None
  1138 + accesslog = s3.get_accesslog(bucket_uri)
  1139 +
  1140 + output(u"Access logging for: %s" % bucket_uri.uri())
  1141 + output(u" Logging Enabled: %s" % accesslog.isLoggingEnabled())
  1142 + if accesslog.isLoggingEnabled():
  1143 + output(u" Target prefix: %s" % accesslog.targetPrefix().uri())
  1144 + #output(u" Public Access: %s" % accesslog.isAclPublic())
  1145 +
1125 1146 def cmd_sign(args):
1126 1147 string_to_sign = args.pop()
1127 1148 debug("string-to-sign: %r" % string_to_sign)
@@ -1426,6 +1447,7 @@ def get_commands_list():
1426 1447 {"cmd":"cp", "label":"Copy object", "param":"s3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]", "func":cmd_cp, "argc":2},
1427 1448 {"cmd":"mv", "label":"Move object", "param":"s3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]", "func":cmd_mv, "argc":2},
1428 1449 {"cmd":"setacl", "label":"Modify Access control list for Bucket or Files", "param":"s3://BUCKET[/OBJECT]", "func":cmd_setacl, "argc":1},
  1450 + {"cmd":"accesslog", "label":"Enable/disable bucket access logging", "param":"s3://BUCKET", "func":cmd_accesslog, "argc":1},
1429 1451 {"cmd":"sign", "label":"Sign arbitrary string using the secret key", "param":"STRING-TO-SIGN", "func":cmd_sign, "argc":1},
1430 1452 {"cmd":"fixbucket", "label":"Fix invalid file names in a bucket", "param":"s3://BUCKET[/PREFIX]", "func":cmd_fixbucket, "argc":1},
1431 1453
@@ -1516,6 +1538,8 @@ def main():
1516 1538
1517 1539 optparser.add_option( "--bucket-location", dest="bucket_location", help="Datacentre to create bucket in. Either EU or US (default)")
1518 1540
  1541 + optparser.add_option( "--log-target-prefix", dest="log_target_prefix", help="Target prefix for access logs (S3 URI)")
  1542 +
1519 1543 optparser.add_option("-m", "--mime-type", dest="default_mime_type", type="mimetype", metavar="MIME/TYPE", help="Default MIME-type to be set for objects stored.")
1520 1544 optparser.add_option("-M", "--guess-mime-type", dest="guess_mime_type", action="store_true", help="Guess MIME-type of files by their extension. Falls back to default MIME-Type as specified by --mime-type option")
1521 1545
@@ -1529,8 +1553,8 @@ def main():
1529 1553
1530 1554 optparser.add_option( "--progress", dest="progress_meter", action="store_true", help="Display progress meter (default on TTY).")
1531 1555 optparser.add_option( "--no-progress", dest="progress_meter", action="store_false", help="Don't display progress meter (default on non-TTY).")
1532   - optparser.add_option( "--enable", dest="cf_enable", action="store_true", help="Enable given CloudFront distribution (only for [cfmodify] command)")
1533   - optparser.add_option( "--disable", dest="cf_enable", action="store_false", help="Enable given CloudFront distribution (only for [cfmodify] command)")
  1556 + optparser.add_option( "--enable", dest="enable", action="store_true", help="Enable given CloudFront distribution (for [cfmodify] command) or access logging (for [accesslog] command)")
  1557 + optparser.add_option( "--disable", dest="enable", action="store_false", help="Enable given CloudFront distribution (only for [cfmodify] command) or access logging (for [accesslog] command)")
1534 1558 optparser.add_option( "--cf-add-cname", dest="cf_cnames_add", action="append", metavar="CNAME", help="Add given CNAME to a CloudFront distribution (only for [cfcreate] and [cfmodify] commands)")
1535 1559 optparser.add_option( "--cf-remove-cname", dest="cf_cnames_remove", action="append", metavar="CNAME", help="Remove given CNAME from a CloudFront distribution (only for [cfmodify] command)")
1536 1560 optparser.add_option( "--cf-comment", dest="cf_comment", action="store", metavar="COMMENT", help="Set COMMENT for a given CloudFront distribution (only for [cfcreate] and [cfmodify] commands)")
@@ -1617,7 +1641,13 @@ def main():
1617 1641 except AttributeError:
1618 1642 ## Some Config() options are not settable from command line
1619 1643 pass
1620   -
  1644 +
  1645 + ## Special handling for tri-state options (True, False, None)
  1646 + cfg.update_option("enable", options.enable)
  1647 +
  1648 + ## CloudFront's cf_enable and Config's enable share the same --enable switch
  1649 + options.cf_enable = options.enable
  1650 +
1621 1651 ## Update CloudFront options if some were set
1622 1652 for option in CfCmd.options.option_list():
1623 1653 try:
@@ -1735,9 +1765,9 @@ if __name__ == '__main__':
1735 1765 ## detect any syntax errors in there
1736 1766 from S3.Exceptions import *
1737 1767 from S3 import PkgInfo
1738   - from S3.S3 import *
  1768 + from S3.S3 import S3
1739 1769 from S3.Config import Config
1740   - from S3.S3Uri import *
  1770 + from S3.S3Uri import S3Uri
1741 1771 from S3 import Utils
1742 1772 from S3.Utils import unicodise
1743 1773 from S3.Progress import Progress
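Putting the pieces together, the new command is driven from the shell roughly like this (bucket names are placeholders; as NEWS notes, the manpage entry is still missing):

    # Show the current logging status of a bucket
    s3cmd accesslog s3://mybucket

    # Enable access logging, delivering log objects under the given prefix
    s3cmd accesslog --enable --log-target-prefix=s3://logs-bucket/mybucket/ s3://mybucket

    # Disable it again
    s3cmd accesslog --disable s3://mybucket

The acl_public argument passed from cmd_accesslog() suggests the existing --acl-public switch additionally requests a public-read TargetGrant on the delivered logs, though that switch itself is not touched by this diff.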
