# Fabric deployment tasks (fabfile) for building and publishing the site.
from fabric.api import *
from fabric.contrib.console import confirm
import os
import shutil
import time
import glob
import hashlib
import yaml

# NOTE(review): in the scraped copy, "import yaml" and this shell setting
# were fused into one line ('import yaml = "/bin/sh -c"'). Reconstructed
# as two statements -- confirm against repository history.
env.shell = "/bin/sh -c"
# Run every command inside the "hyde" virtualenv.
env.command_prefixes = [ 'export PATH=$HOME/.virtualenvs/hyde/bin:$PATH',
                         'export VIRTUAL_ENV=$HOME/.virtualenvs/hyde' ]

# Production configuration; media_url is the base URL of static assets.
conf = "site-production.yaml"
# safe_load instead of load: the file is local, but there is no reason
# to allow arbitrary YAML tags when only plain scalars are needed.
media = yaml.safe_load(open(conf))['media_url']
def _hyde(args):
    """Invoke hyde (with backtrace enabled via -x), passing *args* through."""
    cmd = 'hyde -x %s' % args
    return local(cmd)
def regen():
    """Regenerate dev content"""
    # Wipe hyde's output directory. Despite the docstring, this copy
    # only deletes; the regeneration step (presumably a gen() call)
    # appears truncated -- TODO confirm against repository history.
    local('rm -rf deploy')
def gen():
    """Generate dev content"""
    # NOTE(review): the body is missing from this copy -- it presumably
    # invoked hyde's generation step. Restore from repository history.
def serve():
    """Serve dev content through hyde's builtin web server."""
    args = 'serve -a'
    _hyde(args)
def sprite():
    """Regenerate sprites"""
    # Run from content/media/css so glue drops the generated .less here.
    with lcd("content/media/css"):
        # Build the sprite sheet (plus 2x / 1.5x retina variants) from
        # the individual images, emitting LESS with an "lf" namespace.
        local("glue --source=../images/l/sprite --output=../images/l --namespace=lf --less=."
              " --img=../images/l --ratios=2,1.5,1")
        # Insert " .sprite;" at line 3 of the generated LESS, writing it
        # out under the site's naming convention, then drop the original.
        local("sed -e '3i\ .sprite;' sprite.less > luffy.sprite.less")
        local("rm sprite.less")
# For the following task, check that the appropriate fonts are
# installed on the system. The rendering engine of both wkhtmltopdf
# and cutycapt is Qt, which does not support web fonts yet; there is
# also a bug when multiple fonts are used under the same name.
# (The upstream bug-tracker links and the Google Fonts download URL
# were lost from this copy -- restore them from history if needed.)
def screenshots():
    """Generate screenshots of the locally served site.

    Captures each page at several viewport widths, with JavaScript both
    enabled and disabled, into screenshots/<timestamp>/.
    """
    now = time.asctime().replace(" ", "-")
    # Ensure the per-run output directory exists (cutycapt won't create it).
    local("mkdir -p screenshots/%s" % now)
    # NOTE(review): the page list was truncated in this copy ('["en/",');
    # restore the remaining URLs from repository history.
    for url in ["en/"]:
        for width in [320, 600, 1024, 1280]:
            for js in ['on', 'off']:
                # NOTE(review): the tail of this command and the .format()
                # call were truncated in this copy; reconstructed --
                # confirm --min-width against the original.
                local("cutycapt "
                      "--url=http://localhost:8080/{url} "
                      "--out=screenshots/{now}/{width}px-js{js}-{slug}.png "
                      "--delay=1000 "
                      "--javascript={js} "
                      "--max-wait=5000 "
                      "--min-width={width}".format(
                          url=url, now=now, width=width, js=js,
                          slug=url.replace("/", "-").replace(".", "-")))
def build():
    """Build production content.

    Generates the site into .final/, cache-busts static assets (sprites
    and CSS get content-hashed filenames, with references rewritten in
    CSS and HTML), fixes permissions, then interactively commits or
    rolls back the result.
    """
    local("git checkout master")
    local("rm -rf .final/*")
    _hyde('gen -c %s' % conf)
    with lcd(".final"):
        for p in [ 'media/images/l/sprite*.png',
                   'media/css/*.css' ]:
            # Let the shell expand the glob; capture=True returns stdout.
            files = local("echo %s" % p, capture=True).split(" ")
            for f in files:
                # Short content hash for cache busting.
                md5 = local("md5sum %s" % f, capture=True).split(" ")[0][:8]
                print("[+] MD5 hash for %s is %s" % (f, md5))
                # New name embedding the hash before the extension.
                root, ext = os.path.splitext(f)
                newname = "%s.%s%s" % (root, md5, ext)
                local("cp %s %s" % (f, newname))
                # Strip the media/ prefix: references in CSS/HTML are
                # relative to the media root.
                f = f[len('media/'):]
                newname = newname[len('media/'):]
                if ext == ".png":
                    # Sprites are referenced from the stylesheets.
                    local("sed -i 's+%s+%s+g' media/css/*.css" % (f, newname))
                # Rewrite references in HTML (media URL + old name ->
                # media URL + hashed name).
                # NOTE(review): the sed expression was truncated in the
                # scraped copy; reconstructed from the four format
                # arguments (media, f, media, newname) -- confirm.
                local(r"find . -name '*.html' -type f -print0 | xargs -r0 sed -i "
                      '"s+%s%s+%s%s+g"' % (media, f, media, newname))
        # Fix permissions so the web server can read everything.
        local(r"find * -type f -print0 | xargs -r0 chmod a+r")
        local(r"find * -type d -print0 | xargs -r0 chmod a+rx")
        local("git add *")
        local("git diff --stat HEAD")
        if confirm("More diff?", default=True):
            local("git diff --word-diff HEAD")
        if confirm("Keep?", default=True):
            local('git commit -a -m "Autocommit"')
        else:
            # NOTE(review): indentation was flattened in the scraped
            # copy; as written, the rollback would run even after a
            # successful commit. Reconstructed as the rejection branch.
            local("git reset --hard")
            local("git clean -d -f")
            abort("Build rollbacked")
def push():
    """Push built site to production"""
    # NOTE(review): the body is missing from this copy -- it presumably
    # chained push_main() and/or push_s3(). Restore from history.
def push_main():
    """Push built site to ace"""
    # Publish the committed build to the git remotes first.
    local("git push github")
    local("git push")
    # NOTE(review): the rsync destinations (user@host:path) were lost
    # in this copy -- as written these commands only list the sources.
    # Restore the real targets before running.
    local("rsync --exclude=.git -a .final/media/")
    local("rsync --exclude=.git -a .final/")
def _s3cmd(args):
    """Run s3cmd with the common options used by every push_s3 step."""
    base = ("s3cmd --exclude=.git/* --no-preserve --config=./s3cmd.cfg "
            "-F -P --no-check-md5 ")
    local(base + args)
def push_s3():
    """Push built site to S3"""
    # NOTE(review): every "s3://" target below lost its bucket name in
    # this copy -- restore the real bucket(s) before running.
    # This is a simplified version of the site. Notably, we
    # don't have a separate media site.
    # Rewrite src/href attributes pointing at the media host (or any
    # protocol-relative URL) to the local /media/ path.
    local(r"find .final/* -type f -print0 | xargs -0 sed -i 's+\(src\|href\)=\"\(%s\|//\)+\1=\"/media/+g'" % media)
    # Compress HTML, CSS and JS in place, keeping the original names;
    # the matching Content-Encoding header is added on upload below.
    for t in "css js html".split():
        local(r"find .final/* -type f -name *.%s -exec gzip {} \; -exec mv {}.gz {} \;" % t)
    # JS and CSS in media, 30 days, compress
    _s3cmd(" --add-header=Cache-Control:'max-age=2592000'" # 30 days
           " --add-header=Content-Encoding:'gzip'"
           " --mime-type=application/x-javascript"
           " --encoding=UTF-8"
           " --exclude=* --include=*.js"
           " sync .final/media/ s3://")
    _s3cmd(" --add-header=Cache-Control:'max-age=2592000'" # 30 days
           " --add-header=Content-Encoding:'gzip'"
           " --mime-type=text/css"
           " --encoding=UTF-8"
           " --exclude=* --include=*.css"
           " sync .final/media/ s3://")
    # Other files in media, 30 days, don't compress
    _s3cmd(" --add-header=Cache-Control:'max-age=2592000'" # 30 days
           " sync .final/media/ s3://")
    _s3cmd(" --add-header=Cache-Control:'max-age=2592000'" # 30 days
           " sync .final/media/favicon.ico s3://")
    # HTML files, 1h, compress
    _s3cmd(" --add-header=Cache-Control:'max-age=3600'" # 1h
           " --add-header=Content-Encoding:'gzip'"
           " --mime-type=text/html"
           " --encoding=UTF-8"
           " --exclude=* --include=*.html"
           " sync .final/ s3://")
    # Remaining files, 1h, don't compress
    _s3cmd(" --add-header=Cache-Control:'max-age=3600'" # 1h
           " --exclude=media/* --exclude=nginx.conf"
           " sync .final/ s3://")
    # Restore the gzipped working tree to the committed (uncompressed)
    # state so the local checkout stays clean.
    with lcd(".final"):
        local("git reset --hard")