Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Newer
Older
100755 277 lines (254 sloc) 11.384 kb
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
1 #!/usr/bin/env python
2 # Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
3 #
4 # Permission is hereby granted, free of charge, to any person obtaining a
5 # copy of this software and associated documentation files (the
6 # "Software"), to deal in the Software without restriction, including
7 # without limitation the rights to use, copy, modify, merge, publish, dis-
8 # tribute, sublicense, and/or sell copies of the Software, and to permit
9 # persons to whom the Software is furnished to do so, subject to the fol-
10 # lowing conditions:
11 #
12 # The above copyright notice and this permission notice shall be included
13 # in all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
17 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
18 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
19 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 # IN THE SOFTWARE.
22 #
import datetime
import email
import email.utils
import getopt
import mimetypes
import os
import sys
import time

import boto
31
32
8138585 @jakearchibald Moving favicon to assets & linking from html. Part of #24
authored
33 GZIP_EXTENSIONS = ['.css', '.js', '.ttf', '.appcache', '.ico']
ad6c8fc @jakearchibald Adding appcache
authored
34
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
35
def get_headers(fullpath):
    """Build the HTTP headers to upload *fullpath* with.

    Files under an ``/assets/`` directory get far-future caching headers
    (two years, both the HTTP/1.0 ``Expires`` and HTTP/1.1
    ``Cache-Control`` forms); everything else must be revalidated on
    every request.

    :param fullpath: local path of the file being uploaded; only the
        presence of '/assets/' in it is inspected.
    :return: dict of header name -> value.
    """
    headers = {}
    if '/assets/' in fullpath:
        two_years = datetime.timedelta(days=365 * 2)
        expires_at = time.mktime(
            (datetime.datetime.now() + two_years).timetuple())
        # HTTP/1.0.  usegmt=True emits a proper RFC 1123 date ending in
        # 'GMT'; the original appended ' GMT' to a '-0000'-terminated
        # date, producing an invalid header value.
        headers['Expires'] = email.utils.formatdate(expires_at, usegmt=True)
        # HTTP/1.1.  The original emitted 'max-age %d' (missing '='),
        # which is not valid Cache-Control syntax.
        headers['Cache-Control'] = 'max-age=%d' % (3600 * 24 * 365 * 2)
    else:
        headers['Cache-Control'] = 'must-revalidate'
    return headers
49
50
def compress_string(s):
    """Gzip a given string and return the compressed bytes.

    Borrowed from django_extensions.  Uses cStringIO on Python 2 and
    falls back to io.BytesIO elsewhere, and always closes the GzipFile
    so the gzip trailer is written even if an error interrupts the write.

    :param s: byte string to compress.
    :return: gzip-compressed byte string.
    """
    import gzip
    try:
        from cStringIO import StringIO as _Buffer  # Python 2
    except ImportError:
        from io import BytesIO as _Buffer
    zbuf = _Buffer()
    zfile = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
    try:
        zfile.write(s)
    finally:
        zfile.close()
    return zbuf.getvalue()
60
def is_gzip(filename, extensions=None):
    """Check whether *filename* should be gzipped before upload.

    :param filename: file name or path to test.
    :param extensions: optional iterable of suffixes to match; defaults
        to the module-level GZIP_EXTENSIONS (backward compatible with the
        original single-argument form).
    :return: True if the name ends with one of the extensions.
    """
    if extensions is None:
        extensions = GZIP_EXTENSIONS
    # str.endswith accepts a tuple of suffixes, replacing the manual loop.
    return filename.endswith(tuple(extensions))
67
68 usage_string = """
69 SYNOPSIS
70 s3put [-a/--access_key <access_key>] [-s/--secret_key <secret_key>]
71 -b/--bucket <bucket_name> [-c/--callback <num_cb>]
72 [-d/--debug <debug_level>] [-i/--ignore <ignore_dirs>]
73 [-n/--no_op] [-p/--prefix <prefix>] [-q/--quiet]
74 [-g/--grant grant] [-w/--no_overwrite] path
75
76 Where
77 access_key - Your AWS Access Key ID. If not supplied, boto will
78 use the value of the environment variable
79 AWS_ACCESS_KEY_ID
80 secret_key - Your AWS Secret Access Key. If not supplied, boto
81 will use the value of the environment variable
82 AWS_SECRET_ACCESS_KEY
83 bucket_name - The name of the S3 bucket the file(s) should be
84 copied to.
85 path - A path to a directory or file that represents the items
86 to be uploaded. If the path points to an individual file,
87 that file will be uploaded to the specified bucket. If the
88 path points to a directory, s3_it will recursively traverse
89 the directory and upload all files to the specified bucket.
90 debug_level - 0 means no debug output (default), 1 means normal
91 debug output from boto, and 2 means boto debug output
92 plus request/response output from httplib
93 ignore_dirs - a comma-separated list of directory names that will
94 be ignored and not uploaded to S3.
95 num_cb - The number of progress callbacks to display. The default
96 is zero which means no callbacks. If you supplied a value
97 of "-c 10" for example, the progress callback would be
98 called 10 times for each file transferred.
99 prefix - A file path prefix that will be stripped from the full
100 path of the file when determining the key name in S3.
101 For example, if the full path of a file is:
102 /home/foo/bar/fie.baz
103 and the prefix is specified as "-p /home/foo/" the
104 resulting key name in S3 will be:
105 /bar/fie.baz
106 The prefix must end in a trailing separator and if it
107 does not then one will be added.
108 grant - A canned ACL policy that will be granted on each file
109 transferred to S3. The value of provided must be one
110 of the "canned" ACL policies supported by S3:
111 private|public-read|public-read-write|authenticated-read
112 no_overwrite - No files will be overwritten on S3, if the file/key
113 exists on s3 it will be kept. This is useful for
114 resuming interrupted transfers. Note this is not a
115 sync, even if the file has been updated locally if
116 the key exists on s3 the file on s3 will not be
117 updated.
118
119 If the -n option is provided, no files will be transferred to S3 but
120 informational messages will be printed about what would happen.
121 """
ad6c8fc @jakearchibald Adding appcache
authored
122
123
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
def usage():
    """Write the command-line help text to stdout and exit the process."""
    # Equivalent to the original print statement: the string plus a newline.
    sys.stdout.write(usage_string + '\n')
    sys.exit()
ad6c8fc @jakearchibald Adding appcache
authored
127
128
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
def submit_cb(bytes_so_far, total_bytes):
    """Progress callback passed to boto: report transfer progress on stdout."""
    message = '%d bytes transferred / %d bytes total' % (bytes_so_far,
                                                         total_bytes)
    print(message)
131
ad6c8fc @jakearchibald Adding appcache
authored
132
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
def get_key_name(fullpath, prefix):
    """Derive the S3 key for *fullpath* by stripping *prefix* and
    normalising OS path separators to '/'."""
    relative = fullpath[len(prefix):]
    # os.sep is a single character, so replace() is equivalent to the
    # original split(os.sep)/'/'.join round-trip.
    return relative.replace(os.sep, '/')
137
ad6c8fc @jakearchibald Adding appcache
authored
138
def guess_mime_type(path):
    """Best-effort MIME type for *path*.

    The mimetypes module does not know '.appcache', so that extension is
    special-cased to the cache-manifest type; otherwise the stdlib guess
    (possibly None) is returned.
    """
    guessed, _encoding = mimetypes.guess_type(path)
    return 'text/cache-manifest' if path.endswith('.appcache') else guessed
143
144
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
145 def main():
146 try:
147 opts, args = getopt.getopt(sys.argv[1:], 'a:b:c::d:g:hi:np:qs:vw',
148 ['access_key', 'bucket', 'callback', 'debug', 'help', 'grant',
149 'ignore', 'no_op', 'prefix', 'quiet', 'secret_key', 'no_overwrite'])
150 except:
151 usage()
152 ignore_dirs = []
153 aws_access_key_id = None
154 aws_secret_access_key = None
155 bucket_name = ''
156 total = 0
157 debug = 0
158 cb = None
159 num_cb = 0
160 quiet = False
161 no_op = False
162 prefix = '/'
163 grant = None
164 no_overwrite = False
165 for o, a in opts:
166 if o in ('-h', '--help'):
167 usage()
168 sys.exit()
169 if o in ('-a', '--access_key'):
170 aws_access_key_id = a
171 if o in ('-b', '--bucket'):
172 bucket_name = a
173 if o in ('-c', '--callback'):
174 num_cb = int(a)
175 cb = submit_cb
176 if o in ('-d', '--debug'):
177 debug = int(a)
178 if o in ('-g', '--grant'):
179 grant = a
180 if o in ('-i', '--ignore'):
181 ignore_dirs = a.split(',')
182 if o in ('-n', '--no_op'):
183 no_op = True
184 if o in ('w', '--no_overwrite'):
185 no_overwrite = True
186 if o in ('-p', '--prefix'):
187 prefix = a
188 if prefix[-1] != os.sep:
189 prefix = prefix + os.sep
190 if o in ('-q', '--quiet'):
191 quiet = True
192 if o in ('-s', '--secret_key'):
193 aws_secret_access_key = a
194 if len(args) != 1:
195 print usage()
196 path = os.path.expanduser(args[0])
197 path = os.path.expandvars(path)
198 path = os.path.abspath(path)
199 if bucket_name:
200 c = boto.connect_s3(aws_access_key_id=aws_access_key_id,
201 aws_secret_access_key=aws_secret_access_key)
202 c.debug = debug
203 b = c.get_bucket(bucket_name)
204 if os.path.isdir(path):
205 if no_overwrite:
206 if not quiet:
207 print 'Getting list of existing keys to check against'
208 keys = []
209 for key in b.list():
210 keys.append(key.name)
211 for root, dirs, files in os.walk(path):
212 for ignore in ignore_dirs:
213 if ignore in dirs:
214 dirs.remove(ignore)
215 for file in files:
216 fullpath = os.path.join(root, file)
217 key_name = get_key_name(fullpath, prefix)
218 copy_file = True
219 if no_overwrite:
220 if key_name in keys:
221 copy_file = False
222 if not quiet:
223 print 'Skipping %s as it exists in s3' % file
224 if copy_file:
225 if not quiet:
226 print 'Copying %s to %s/%s' % (file, bucket_name, key_name)
227 if not no_op:
228 k = b.new_key(key_name)
229 headers = get_headers(fullpath)
230 if is_gzip(fullpath):
ad6c8fc @jakearchibald Adding appcache
authored
231 content_type = guess_mime_type(fullpath)
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
232 if content_type:
233 headers['Content-Type'] = content_type
234 file_obj = open(fullpath, 'rb')
235 file_size = os.fstat(file_obj.fileno()).st_size
236 filedata = file_obj.read()
237 filedata = compress_string(filedata)
238 headers['Content-Encoding'] = 'gzip'
ad6c8fc @jakearchibald Adding appcache
authored
239 print "\tgzipped: %dk to %dk" % (file_size / 1024, len(filedata) / 1024)
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
240 k.set_contents_from_string(filedata, headers, replace=True)
241 k.make_public()
242 else:
243 k.set_contents_from_filename(fullpath, cb=cb, headers=headers,
244 num_cb=num_cb, policy=grant)
245 total += 1
246 elif os.path.isfile(path):
247 key_name = os.path.split(path)[1]
248 copy_file = True
249 if no_overwrite:
250 if b.get_key(key_name):
251 copy_file = False
252 if not quiet:
253 print 'Skipping %s as it exists in s3' % path
254 if copy_file:
255 k = b.new_key(key_name)
256 headers = get_headers(fullpath)
257 if is_gzip(fullpath):
ad6c8fc @jakearchibald Adding appcache
authored
258 content_type = guess_mime_type(fullpath)
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
259 if content_type:
260 headers['Content-Type'] = content_type
261 file_obj = open(fullpath, 'rb')
262 file_size = os.fstat(file_obj.fileno()).st_size
263 filedata = file_obj.read()
264 filedata = compress_string(filedata)
265 headers['Content-Encoding'] = 'gzip'
ad6c8fc @jakearchibald Adding appcache
authored
266 print "\tgzipped: %dk to %dk" % (file_size / 1024, len(filedata) / 1024)
644a25e @alfredo Deploy script amended. Now with far future expires and gzip
alfredo authored
267 k.set_contents_from_string(filedata, headers, replace=True)
268 k.make_public()
269 else:
270 k.set_contents_from_filename(path, cb=cb, num_cb=num_cb, policy=grant,
271 headers=headers,)
272 else:
273 print usage()
274
275 if __name__ == "__main__":
276 main()
Something went wrong with that request. Please try again.