Merge branch 'release/0.6b2'

2 parents 55a02cf + c199bff · commit 253efbb2066200778a897187de513ab8f6736d91 · @jezdez committed Feb 17, 2011
30 LICENSE
@@ -75,3 +75,33 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
+
+utils.cached_property extracted from Celery
+-------------------------------------------
+Copyright (c) 2009-2011, Ask Solem and contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+Neither the name of Ask Solem nor the names of its contributors may be used
+to endorse or promote products derived from this software without specific
+prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
2 compressor/__init__.py
@@ -1,4 +1,4 @@
-VERSION = (0, 6, 0, "b", 1) # following PEP 386
+VERSION = (0, 6, 0, "b", 2) # following PEP 386
DEV_N = None
145 compressor/base.py
@@ -1,144 +1,137 @@
import os
+from itertools import chain
from django.template.loader import render_to_string
from django.core.files.base import ContentFile
-from compressor import filters
from compressor.cache import get_hexdigest, get_mtime
from compressor.conf import settings
from compressor.exceptions import UncompressableFileError
-from compressor.utils import get_class
+from compressor.storage import default_storage
+from compressor.utils import get_class, cached_property
class Compressor(object):
- def __init__(self, content, output_prefix="compressed"):
- self.content = content
+ def __init__(self, content=None, output_prefix="compressed"):
+ self.content = content or ""
+ self.extra_context = {}
self.type = None
self.output_prefix = output_prefix
self.split_content = []
- self._parser = None
+ self.charset = settings.DEFAULT_CHARSET
def split_contents(self):
- raise NotImplementedError('split_contents must be defined in a subclass')
+ raise NotImplementedError(
+ "split_contents must be defined in a subclass")
def get_filename(self, url):
try:
base_url = self.storage.base_url
except AttributeError:
base_url = settings.COMPRESS_URL
-
if not url.startswith(base_url):
- raise UncompressableFileError('"%s" is not in COMPRESS_URL ("%s") and can not be compressed' % (url, base_url))
+ raise UncompressableFileError(
+ "'%s' is not in COMPRESS_URL ('%s') and can not be compressed"
+ % (url, base_url))
basename = url.replace(base_url, "", 1)
filename = os.path.join(settings.COMPRESS_ROOT, basename)
if not os.path.exists(filename):
- raise UncompressableFileError('"%s" does not exist' % filename)
+ raise UncompressableFileError("'%s' does not exist" % filename)
return filename
- def _get_parser(self):
- if self._parser:
- return self._parser
- parser_cls = get_class(settings.COMPRESS_PARSER)
- self._parser = parser_cls(self.content)
- return self._parser
+ @cached_property
+ def parser(self):
+ return get_class(settings.COMPRESS_PARSER)(self.content)
- def _set_parser(self, parser):
- self._parser = parser
- parser = property(_get_parser, _set_parser)
+ @cached_property
+ def cached_filters(self):
+ return [get_class(filter_cls) for filter_cls in self.filters]
- @property
+ @cached_property
def mtimes(self):
- return [get_mtime(h[1]) for h in self.split_contents() if h[0] == 'file']
+ for kind, value, elem in self.split_contents():
+ if kind == 'file':
+ yield str(get_mtime(value))
- @property
+ @cached_property
def cachekey(self):
- cachebits = [self.content]
- cachebits.extend([str(m) for m in self.mtimes])
- cachestr = "".join(cachebits).encode(settings.DEFAULT_CHARSET)
+ cachestr = "".join(
+ chain([self.content], self.mtimes)).encode(self.charset)
return "django_compressor.%s" % get_hexdigest(cachestr)[:12]
- @property
+ @cached_property
def storage(self):
- from compressor.storage import default_storage
return default_storage
- @property
+ @cached_property
def hunks(self):
- if getattr(self, '_hunks', ''):
- return self._hunks
- self._hunks = []
- for kind, v, elem in self.split_contents():
+ for kind, value, elem in self.split_contents():
attribs = self.parser.elem_attribs(elem)
- if kind == 'hunk':
- input = v
- if self.filters:
- input = self.filter(input, 'input', elem=elem)
+ if kind == "hunk":
# Let's cast BeautifulSoup element to unicode here since
# it will try to encode using ascii internally later
- self._hunks.append(unicode(input))
- if kind == 'file':
- # TODO: wrap this in a try/except for IoErrors(?)
- fd = open(v, 'rb')
- input = fd.read()
- if self.filters:
- input = self.filter(input, 'input', filename=v, elem=elem)
- charset = attribs.get('charset', settings.DEFAULT_CHARSET)
- self._hunks.append(unicode(input, charset))
- fd.close()
- return self._hunks
+ yield unicode(self.filter(value, "input", elem=elem))
+ elif kind == "file":
+ content = ""
+ try:
+ fd = open(value, 'rb')
+ try:
+ content = fd.read()
+ finally:
+ fd.close()
+ except IOError, e:
+ raise UncompressableFileError(
+ "IOError while processing '%s': %s" % (value, e))
+ content = self.filter(content, "input", filename=value, elem=elem)
+ yield unicode(content, attribs.get("charset", self.charset))
def concat(self):
- # Design decision needed: either everything should be unicode up to
- # here or we encode strings as soon as we acquire them. Currently
- # concat() expects all hunks to be unicode and does the encoding
- return "\n".join([hunk.encode(settings.DEFAULT_CHARSET) for hunk in self.hunks])
+ return "\n".join((hunk.encode(self.charset) for hunk in self.hunks))
def filter(self, content, method, **kwargs):
- for f in self.filters:
- filter = getattr(get_class(f)(content, filter_type=self.type), method)
+ for filter_cls in self.cached_filters:
+ filter_func = getattr(
+ filter_cls(content, filter_type=self.type), method)
try:
- if callable(filter):
- content = filter(**kwargs)
+ if callable(filter_func):
+ content = filter_func(**kwargs)
except NotImplementedError:
pass
return content
- @property
+ @cached_property
def combined(self):
- if getattr(self, '_output', ''):
- return self._output
- output = self.concat()
- if self.filters:
- output = self.filter(output, 'output')
- self._output = output
- return self._output
-
- @property
+ return self.filter(self.concat(), 'output')
+
+ @cached_property
def hash(self):
return get_hexdigest(self.combined)[:12]
- @property
+ @cached_property
def new_filepath(self):
- filename = "".join([self.hash, self.extension])
- return os.path.join(
- settings.COMPRESS_OUTPUT_DIR.strip(os.sep), self.output_prefix, filename)
+ return os.path.join(settings.COMPRESS_OUTPUT_DIR.strip(os.sep),
+ self.output_prefix, "%s.%s" % (self.hash, self.type))
def save_file(self):
if self.storage.exists(self.new_filepath):
return False
self.storage.save(self.new_filepath, ContentFile(self.combined))
return True
- def output(self):
- if not settings.COMPRESS_ENABLED:
+ def output(self, forced=False):
+ if not settings.COMPRESS_ENABLED and not forced:
return self.content
- self.save_file()
- context = getattr(self, 'extra_context', {})
- context['url'] = self.storage.url(self.new_filepath)
+ context = {
+ "saved": self.save_file(),
+ "url": self.storage.url(self.new_filepath),
+ }
+ context.update(self.extra_context)
return render_to_string(self.template_name, context)
def output_inline(self):
- context = {'content': settings.COMPRESS_ENABLED and self.combined or self.concat()}
- if hasattr(self, 'extra_context'):
- context.update(self.extra_context)
+ if settings.COMPRESS_ENABLED:
+ content = self.combined
+ else:
+ content = self.concat()
+ context = dict(content=content, **self.extra_context)
return render_to_string(self.template_name_inline, context)
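
The refactored output() above takes a new forced keyword so callers can compress even when COMPRESS_ENABLED is off; the offline "compress" management command further down relies on this. A minimal sketch, not part of this commit, of what that looks like from Python:

from compressor.css import CssCompressor

html = '<link rel="stylesheet" href="/media/css/one.css" type="text/css" charset="utf-8">'
node = CssCompressor(html)
node.output()             # returns the original HTML untouched when COMPRESS_ENABLED is False
node.output(forced=True)  # compresses and saves the output file regardless of the setting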
17 compressor/css.py
@@ -4,9 +4,8 @@
class CssCompressor(Compressor):
- def __init__(self, content, output_prefix="css"):
+ def __init__(self, content=None, output_prefix="css"):
super(CssCompressor, self).__init__(content, output_prefix)
- self.extension = ".css"
self.template_name = "compressor/css.html"
self.template_name_inline = "compressor/css_inline.html"
self.filters = list(settings.COMPRESS_CSS_FILTERS)
@@ -37,19 +36,19 @@ def split_contents(self):
if self.media_nodes and self.media_nodes[-1][0] == media:
self.media_nodes[-1][1].split_content.append(data)
else:
- node = CssCompressor(content='')
+ node = CssCompressor()
node.split_content.append(data)
self.media_nodes.append((media, node))
return self.split_content
- def output(self):
+ def output(self, forced=False):
self.split_contents()
if not hasattr(self, 'media_nodes'):
- return super(CssCompressor, self).output()
- if not settings.COMPRESS_ENABLED:
+ return super(CssCompressor, self).output(forced=forced)
+ if not settings.COMPRESS_ENABLED and not forced:
return self.content
ret = []
for media, subnode in self.media_nodes:
- subnode.extra_context = {'media': media}
- ret.append(subnode.output())
- return ''.join(ret)
+ subnode.extra_context.update({'media': media})
+ ret.append(subnode.output(forced=forced))
+ return "".join(ret)
5 compressor/filters/cssmin/cssmin.py
@@ -29,7 +29,10 @@
"""`cssmin` - A Python port of the YUI CSS compressor."""
-from StringIO import StringIO # The pure-Python StringIO supports unicode.
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
import re
12 compressor/filters/jsmin/jsmin.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
# This code is original from jsmin by Douglas Crockford, it was translated to
# Python by Baruch Even. The original code had the following copyright and
@@ -29,8 +29,10 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# */
-
-from StringIO import StringIO
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
def jsmin(js):
ins = StringIO(js)
@@ -87,10 +89,10 @@ def _peek(self):
def _next(self):
"""get the next character, excluding comments. peek() is used to see
- if a '/' is followed by a '/' or '*'.
+ if an unescaped '/' is followed by a '/' or '*'.
"""
c = self._get()
- if c == '/':
+ if c == '/' and self.theA != '\\':
p = self._peek()
if p == '/':
c = self._get()
2 compressor/filters/yui.py
@@ -11,7 +11,7 @@ def output(self, **kwargs):
arguments = ''
if self.type == 'js':
arguments = settings.COMPRESS_YUI_JS_ARGUMENTS
- if self.type == 'css':
+ elif self.type == 'css':
arguments = settings.COMPRESS_YUI_CSS_ARGUMENTS
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, self.type, arguments)
5 compressor/js.py
@@ -5,12 +5,11 @@
class JsCompressor(Compressor):
- def __init__(self, content, output_prefix="js"):
+ def __init__(self, content=None, output_prefix="js"):
super(JsCompressor, self).__init__(content, output_prefix)
- self.extension = ".js"
self.template_name = "compressor/js.html"
self.template_name_inline = "compressor/js_inline.html"
- self.filters = settings.COMPRESS_JS_FILTERS
+ self.filters = list(settings.COMPRESS_JS_FILTERS)
self.type = 'js'
def split_contents(self):
2 compressor/management/commands/compress.py
@@ -139,7 +139,7 @@ def compress(self, log=None, **options):
for nodes in compressor_nodes.values():
for node in nodes:
key = get_offline_cachekey(node.nodelist)
- result = node.render(context, compress=True, offline=False)
+ result = node.render(context, forced=True)
cache.set(key, result, settings.COMPRESS_OFFLINE_TIMEOUT)
results.append(result)
count += 1
3 compressor/settings.py
@@ -16,6 +16,9 @@ class CompressorSettings(AppSettings):
OUTPUT_DIR = 'cache'
STORAGE = 'compressor.storage.CompressorFileStorage'
+ CSS_COMPRESSOR = "compressor.css.CssCompressor"
+ JS_COMPRESSOR = "compressor.js.JsCompressor"
+
URL = None
ROOT = None
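
The two new settings make the compressor classes pluggable; the template tag further down resolves them with get_class(). A hypothetical override in a project's settings.py, with made-up module and class names, not part of this commit:

# settings.py -- swap in a custom subclass for the {% compress css %} tag.
COMPRESS_CSS_COMPRESSOR = "myproject.compress.MyCssCompressor"

# myproject/compress.py
from compressor.css import CssCompressor

class MyCssCompressor(CssCompressor):
    """Example subclass; add project-specific behaviour here."""
    pass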
17 compressor/storage.py
@@ -1,3 +1,5 @@
+import gzip
+
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.utils.functional import LazyObject
@@ -19,6 +21,21 @@ def __init__(self, location=None, base_url=None, *args, **kwargs):
super(CompressorFileStorage, self).__init__(location, base_url,
*args, **kwargs)
+class GzipCompressorFileStorage(CompressorFileStorage):
+ """
+ The standard compressor file system storage that gzips storage files
+ additionally to the usual files.
+ """
+ def url(self, name):
+ return u'%s.gz' % super(GzipCompressorFileStorage, self).url(name)
+
+ def save(self, filename, content):
+ filename = super(GzipCompressorFileStorage, self).save(filename, content)
+ out = gzip.open(u'%s.gz' % self.path(filename), 'wb')
+ out.writelines(open(self.path(filename), 'rb'))
+ out.close()
+
+
class DefaultStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class(settings.COMPRESS_STORAGE)()
59 compressor/templatetags/compress.py
@@ -1,15 +1,19 @@
import time
from django import template
+from django.core.exceptions import ImproperlyConfigured
from compressor.cache import cache, get_offline_cachekey
from compressor.conf import settings
-from compressor.css import CssCompressor
-from compressor.js import JsCompressor
-
+from compressor.utils import get_class
OUTPUT_FILE = 'file'
OUTPUT_INLINE = 'inline'
+OUTPUT_MODES = (OUTPUT_FILE, OUTPUT_INLINE)
+COMPRESSORS = {
+ "css": settings.COMPRESS_CSS_COMPRESSOR,
+ "js": settings.COMPRESS_JS_COMPRESSOR,
+}
register = template.Library()
@@ -18,6 +22,8 @@ def __init__(self, nodelist, kind=None, mode=OUTPUT_FILE):
self.nodelist = nodelist
self.kind = kind
self.mode = mode
+ self.compressor_cls = get_class(
+ COMPRESSORS.get(self.kind), exception=ImproperlyConfigured)
def cache_get(self, key):
packed_val = cache.get(key)
@@ -37,31 +43,33 @@ def cache_set(self, key, val, timeout=settings.COMPRESS_REBUILD_TIMEOUT, refresh
packed_val = (val, refresh_time, refreshed)
return cache.set(key, packed_val, real_timeout)
- def render(self, context, compress=settings.COMPRESS_ENABLED, offline=settings.COMPRESS_OFFLINE):
- if compress and offline:
+ def cache_key(self, compressor):
+ return "%s.%s.%s" % (compressor.cachekey, self.mode, self.kind)
+
+ def render(self, context, forced=False):
+ if (settings.COMPRESS_ENABLED and settings.COMPRESS_OFFLINE) and not forced:
key = get_offline_cachekey(self.nodelist)
content = cache.get(key)
if content:
return content
content = self.nodelist.render(context)
- if offline or not compress or not len(content.strip()):
+ if (not settings.COMPRESS_ENABLED or not len(content.strip())) and not forced:
return content
- if self.kind == 'css':
- compressor = CssCompressor(content)
- if self.kind == 'js':
- compressor = JsCompressor(content)
- cachekey = "%s.%s" % (compressor.cachekey, self.mode)
+ compressor = self.compressor_cls(content)
+ cachekey = self.cache_key(compressor)
output = self.cache_get(cachekey)
- if output is None or not offline:
+ if output is None or forced:
try:
- if self.mode == OUTPUT_FILE:
- output = compressor.output()
- else:
- output = compressor.output_inline()
+ if self.mode == OUTPUT_INLINE:
+ return compressor.output_inline()
+ output = compressor.output(forced=forced)
self.cache_set(cachekey, output)
except:
- from traceback import format_exc
- raise Exception(format_exc())
+ if settings.DEBUG:
+ from traceback import format_exc
+ raise Exception(format_exc())
+ else:
+ return content
return output
@register.tag
@@ -109,17 +117,20 @@ def compress(parser, token):
args = token.split_contents()
if not len(args) in (2, 3):
- raise template.TemplateSyntaxError("%r tag requires either one or two arguments." % args[0])
+ raise template.TemplateSyntaxError(
+ "%r tag requires either one or two arguments." % args[0])
kind = args[1]
- if not kind in ['css', 'js']:
- raise template.TemplateSyntaxError("%r's argument must be 'js' or 'css'." % args[0])
+ if not kind in COMPRESSORS.keys():
+ raise template.TemplateSyntaxError(
+ "%r's argument must be 'js' or 'css'." % args[0])
if len(args) == 3:
mode = args[2]
- if not mode in (OUTPUT_FILE, OUTPUT_INLINE):
- raise template.TemplateSyntaxError("%r's second argument must be '%s' or '%s'." % (args[0], OUTPUT_FILE, OUTPUT_INLINE))
+ if not mode in OUTPUT_MODES:
+ raise template.TemplateSyntaxError(
+ "%r's second argument must be '%s' or '%s'." %
+ (args[0], OUTPUT_FILE, OUTPUT_INLINE))
else:
mode = OUTPUT_FILE
-
return CompressorNode(nodelist, kind, mode)
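
For reference, the tag is exercised in the tests below through Django's template engine; a minimal sketch, assuming the usual {% endcompress %} closing tag, not part of this commit:

from django.template import Template, Context

t = Template(u"""{% load compress %}{% compress js inline %}
<script src="/media/js/one.js" type="text/javascript" charset="utf-8"></script>
{% endcompress %}""")
print t.render(Context({}))  # inline mode renders the (optionally minified) script in place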
16 compressor/tests/storage.py
@@ -1,16 +0,0 @@
-import gzip
-from compressor.storage import CompressorFileStorage
-
-
-class TestStorage(CompressorFileStorage):
- """
- Test compressor storage that gzips storage files
- """
- def url(self, name):
- return u'%s.gz' % super(TestStorage, self).url(name)
-
- def save(self, filename, content):
- filename = super(TestStorage, self).save(filename, content)
- out = gzip.open(u'%s.gz' % self.path(filename), 'wb')
- out.writelines(open(self.path(filename), 'rb'))
- out.close()
90 compressor/tests/tests.py
@@ -12,7 +12,7 @@
from django.template import Template, Context, TemplateSyntaxError
from django.test import TestCase
-from compressor import storage
+from compressor import base
from compressor.cache import get_hashed_mtime
from compressor.conf import settings
from compressor.css import CssCompressor
@@ -29,92 +29,98 @@ def setUp(self):
<style type="text/css">p { border:5px solid green;}</style>
<link rel="stylesheet" href="/media/css/two.css" type="text/css" charset="utf-8">
"""
- self.cssNode = CssCompressor(self.css)
+ self.css_node = CssCompressor(self.css)
self.js = """
<script src="/media/js/one.js" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript" charset="utf-8">obj.value = "value";</script>
"""
- self.jsNode = JsCompressor(self.js)
+ self.js_node = JsCompressor(self.js)
def test_css_split(self):
out = [
('file', os.path.join(settings.COMPRESS_ROOT, u'css/one.css'), u'<link rel="stylesheet" href="/media/css/one.css" type="text/css" charset="utf-8" />'),
('hunk', u'p { border:5px solid green;}', u'<style type="text/css">p { border:5px solid green;}</style>'),
('file', os.path.join(settings.COMPRESS_ROOT, u'css/two.css'), u'<link rel="stylesheet" href="/media/css/two.css" type="text/css" charset="utf-8" />'),
]
- split = self.cssNode.split_contents()
- split = [(x[0], x[1], self.cssNode.parser.elem_str(x[2])) for x in split]
+ split = self.css_node.split_contents()
+ split = [(x[0], x[1], self.css_node.parser.elem_str(x[2])) for x in split]
self.assertEqual(out, split)
def test_css_hunks(self):
out = ['body { background:#990; }', u'p { border:5px solid green;}', 'body { color:#fff; }']
- self.assertEqual(out, self.cssNode.hunks)
+ self.assertEqual(out, list(self.css_node.hunks))
def test_css_output(self):
out = u'body { background:#990; }\np { border:5px solid green;}\nbody { color:#fff; }'
- self.assertEqual(out, self.cssNode.combined)
+ self.assertEqual(out, self.css_node.combined)
def test_css_mtimes(self):
is_date = re.compile(r'^\d{10}[\.\d]+$')
- for date in self.cssNode.mtimes:
+ for date in self.css_node.mtimes:
self.assert_(is_date.match(str(float(date))), "mtimes is returning something that doesn't look like a date: %s" % date)
def test_css_return_if_off(self):
settings.COMPRESS_ENABLED = False
- self.assertEqual(self.css, self.cssNode.output())
+ self.assertEqual(self.css, self.css_node.output())
def test_cachekey(self):
is_cachekey = re.compile(r'django_compressor\.\w{12}')
- self.assert_(is_cachekey.match(self.cssNode.cachekey), "cachekey is returning something that doesn't look like r'django_compressor\.\w{12}'")
+ self.assert_(is_cachekey.match(self.css_node.cachekey), "cachekey is returning something that doesn't look like r'django_compressor\.\w{12}'")
def test_css_hash(self):
- self.assertEqual('f7c661b7a124', self.cssNode.hash)
+ self.assertEqual('f7c661b7a124', self.css_node.hash)
def test_css_return_if_on(self):
output = u'<link rel="stylesheet" href="/media/cache/css/f7c661b7a124.css" type="text/css">'
- self.assertEqual(output, self.cssNode.output().strip())
+ self.assertEqual(output, self.css_node.output().strip())
def test_js_split(self):
out = [('file', os.path.join(settings.COMPRESS_ROOT, u'js/one.js'), '<script src="/media/js/one.js" type="text/javascript" charset="utf-8"></script>'),
('hunk', u'obj.value = "value";', '<script type="text/javascript" charset="utf-8">obj.value = "value";</script>')
]
- split = self.jsNode.split_contents()
- split = [(x[0], x[1], self.jsNode.parser.elem_str(x[2])) for x in split]
+ split = self.js_node.split_contents()
+ split = [(x[0], x[1], self.js_node.parser.elem_str(x[2])) for x in split]
self.assertEqual(out, split)
def test_js_hunks(self):
out = ['obj = {};', u'obj.value = "value";']
- self.assertEqual(out, self.jsNode.hunks)
+ self.assertEqual(out, list(self.js_node.hunks))
def test_js_concat(self):
out = u'obj = {};\nobj.value = "value";'
- self.assertEqual(out, self.jsNode.concat())
+ self.assertEqual(out, self.js_node.concat())
def test_js_output(self):
out = u'obj={};obj.value="value";'
- self.assertEqual(out, self.jsNode.combined)
+ self.assertEqual(out, self.js_node.combined)
def test_js_return_if_off(self):
- settings.COMPRESS_ENABLED = False
- self.assertEqual(self.js, self.jsNode.output())
+ try:
+ enabled = settings.COMPRESS_ENABLED
+ settings.COMPRESS_ENABLED = False
+ self.assertEqual(self.js, self.js_node.output())
+ finally:
+ settings.COMPRESS_ENABLED = enabled
def test_js_return_if_on(self):
output = u'<script type="text/javascript" src="/media/cache/js/3f33b9146e12.js" charset="utf-8"></script>'
- self.assertEqual(output, self.jsNode.output())
+ self.assertEqual(output, self.js_node.output())
def test_custom_output_dir(self):
- old_output_dir = settings.COMPRESS_OUTPUT_DIR
- settings.COMPRESS_OUTPUT_DIR = 'custom'
- output = u'<script type="text/javascript" src="/media/custom/js/3f33b9146e12.js" charset="utf-8"></script>'
- self.assertEqual(output, JsCompressor(self.js).output())
- settings.COMPRESS_OUTPUT_DIR = ''
- output = u'<script type="text/javascript" src="/media/js/3f33b9146e12.js" charset="utf-8"></script>'
- self.assertEqual(output, JsCompressor(self.js).output())
- settings.COMPRESS_OUTPUT_DIR = '/custom/nested/'
- output = u'<script type="text/javascript" src="/media/custom/nested/js/3f33b9146e12.js" charset="utf-8"></script>'
- self.assertEqual(output, JsCompressor(self.js).output())
- settings.COMPRESS_OUTPUT_DIR = old_output_dir
+ try:
+ old_output_dir = settings.COMPRESS_OUTPUT_DIR
+ settings.COMPRESS_OUTPUT_DIR = 'custom'
+ output = u'<script type="text/javascript" src="/media/custom/js/3f33b9146e12.js" charset="utf-8"></script>'
+ self.assertEqual(output, JsCompressor(self.js).output())
+ settings.COMPRESS_OUTPUT_DIR = ''
+ output = u'<script type="text/javascript" src="/media/js/3f33b9146e12.js" charset="utf-8"></script>'
+ self.assertEqual(output, JsCompressor(self.js).output())
+ settings.COMPRESS_OUTPUT_DIR = '/custom/nested/'
+ output = u'<script type="text/javascript" src="/media/custom/nested/js/3f33b9146e12.js" charset="utf-8"></script>'
+ self.assertEqual(output, JsCompressor(self.js).output())
+ finally:
+ settings.COMPRESS_OUTPUT_DIR = old_output_dir
if lxml:
class LxmlCompressorTestCase(CompressorTestCase):
@@ -125,8 +131,8 @@ def test_css_split(self):
('hunk', u'p { border:5px solid green;}', u'<style type="text/css">p { border:5px solid green;}</style>'),
('file', os.path.join(settings.COMPRESS_ROOT, u'css/two.css'), u'<link rel="stylesheet" href="/media/css/two.css" type="text/css" charset="utf-8">'),
]
- split = self.cssNode.split_contents()
- split = [(x[0], x[1], self.cssNode.parser.elem_str(x[2])) for x in split]
+ split = self.css_node.split_contents()
+ split = [(x[0], x[1], self.css_node.parser.elem_str(x[2])) for x in split]
self.assertEqual(out, split)
def setUp(self):
@@ -146,7 +152,7 @@ def setUp(self):
<link rel="stylesheet" href="/media/css/url/url1.css" type="text/css" charset="utf-8">
<link rel="stylesheet" href="/media/css/url/2/url2.css" type="text/css" charset="utf-8">
"""
- self.cssNode = CssCompressor(self.css)
+ self.css_node = CssCompressor(self.css)
def test_css_absolute_filter(self):
from compressor.filters.css_default import CssAbsoluteFilter
@@ -190,7 +196,7 @@ def test_css_hunks(self):
}
out = [u"p { background: url('/media/images/test.png?%(hash1)s'); }\np { background: url('/media/images/test.png?%(hash1)s'); }\np { background: url('/media/images/test.png?%(hash1)s'); }\np { background: url('/media/images/test.png?%(hash1)s'); }\n" % hash_dict,
u"p { background: url('/media/images/test.png?%(hash2)s'); }\np { background: url('/media/images/test.png?%(hash2)s'); }\np { background: url('/media/images/test.png?%(hash2)s'); }\np { background: url('/media/images/test.png?%(hash2)s'); }\n" % hash_dict]
- self.assertEqual(out, self.cssNode.hunks)
+ self.assertEqual(out, list(self.css_node.hunks))
class CssDataUriTestCase(TestCase):
@@ -204,12 +210,12 @@ def setUp(self):
self.css = """
<link rel="stylesheet" href="/media/css/datauri.css" type="text/css" charset="utf-8">
"""
- self.cssNode = CssCompressor(self.css)
+ self.css_node = CssCompressor(self.css)
def test_data_uris(self):
datauri_hash = get_hashed_mtime(os.path.join(settings.COMPRESS_ROOT, 'css/datauri.css'))
out = [u'.add { background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAAK/INwWK6QAAABl0RVh0U29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZTwAAAJvSURBVDjLpZPrS5NhGIf9W7YvBYOkhlkoqCklWChv2WyKik7blnNris72bi6dus0DLZ0TDxW1odtopDs4D8MDZuLU0kXq61CijSIIasOvv94VTUfLiB74fXngup7nvrnvJABJ/5PfLnTTdcwOj4RsdYmo5glBWP6iOtzwvIKSWstI0Wgx80SBblpKtE9KQs/We7EaWoT/8wbWP61gMmCH0lMDvokT4j25TiQU/ITFkek9Ow6+7WH2gwsmahCPdwyw75uw9HEO2gUZSkfyI9zBPCJOoJ2SMmg46N61YO/rNoa39Xi41oFuXysMfh36/Fp0b7bAfWAH6RGi0HglWNCbzYgJaFjRv6zGuy+b9It96N3SQvNKiV9HvSaDfFEIxXItnPs23BzJQd6DDEVM0OKsoVwBG/1VMzpXVWhbkUM2K4oJBDYuGmbKIJ0qxsAbHfRLzbjcnUbFBIpx/qH3vQv9b3U03IQ/HfFkERTzfFj8w8jSpR7GBE123uFEYAzaDRIqX/2JAtJbDat/COkd7CNBva2cMvq0MGxp0PRSCPF8BXjWG3FgNHc9XPT71Ojy3sMFdfJRCeKxEsVtKwFHwALZfCUk3tIfNR8XiJwc1LmL4dg141JPKtj3WUdNFJqLGFVPC4OkR4BxajTWsChY64wmCnMxsWPCHcutKBxMVp5mxA1S+aMComToaqTRUQknLTH62kHOVEE+VQnjahscNCy0cMBWsSI0TCQcZc5ALkEYckL5A5noWSBhfm2AecMAjbcRWV0pUTh0HE64TNf0mczcnnQyu/MilaFJCae1nw2fbz1DnVOxyGTlKeZft/Ff8x1BRssfACjTwQAAAABJRU5ErkJggg=="); }\n.python { background-image: url("/media/img/python.png?%s"); }\n.datauri { background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAd0SU1FB9YGARc5KB0XV+IAAAAddEVYdENvbW1lbnQAQ3JlYXRlZCB3aXRoIFRoZSBHSU1Q72QlbgAAAF1JREFUGNO9zL0NglAAxPEfdLTs4BZM4DIO4C7OwQg2JoQ9LE1exdlYvBBeZ7jqch9//q1uH4TLzw4d6+ErXMMcXuHWxId3KOETnnXXV6MJpcq2MLaI97CER3N0 vr4MkhoXe0rZigAAAABJRU5ErkJggg=="); }\n' % datauri_hash]
- self.assertEqual(out, self.cssNode.hunks)
+ self.assertEqual(out, list(self.css_node.hunks))
class CssMediaTestCase(TestCase):
@@ -220,10 +226,10 @@ def setUp(self):
<link rel="stylesheet" href="/media/css/two.css" type="text/css" charset="utf-8" media="all">
<style type="text/css">h1 { border:5px solid green;}</style>
"""
- self.cssNode = CssCompressor(self.css)
+ self.css_node = CssCompressor(self.css)
def test_css_output(self):
- links = BeautifulSoup(self.cssNode.output()).findAll('link')
+ links = BeautifulSoup(self.css_node.output()).findAll('link')
media = [u'screen', u'print', u'all', None]
self.assertEqual(len(links), 4)
self.assertEqual(media, [l.get('media', None) for l in links])
@@ -330,12 +336,12 @@ def test_compress_tag_with_illegal_arguments(self):
class StorageTestCase(TestCase):
def setUp(self):
- self._storage = storage.default_storage
- storage.default_storage = get_storage_class('compressor.tests.storage.TestStorage')()
+ self._storage = base.default_storage
+ base.default_storage = get_storage_class('compressor.storage.GzipCompressorFileStorage')()
settings.COMPRESS_ENABLED = True
def tearDown(self):
- storage.default_storage = self._storage
+ base.default_storage = self._storage
def test_css_tag_with_storage(self):
template = u"""{% load compress %}{% compress css %}
73 compressor/utils.py
@@ -27,8 +27,10 @@ def get_class(class_string, exception=FilterError):
if class_name != '':
cls = getattr(__import__(mod_name, {}, {}, ['']), class_name)
except (ImportError, AttributeError):
- raise exception('Failed to import filter %s' % class_string)
- return cls
+ pass
+ else:
+ return cls
+ raise exception('Failed to import %s' % class_string)
def get_mod_func(callback):
"""
@@ -192,3 +194,70 @@ def __init__(self, prefix):
def issetting(self, (name, value)):
return name == name.upper()
+
+
+class cached_property(object):
+ """Property descriptor that caches the return value
+ of the get function.
+
+ *Examples*
+
+ .. code-block:: python
+
+ @cached_property
+ def connection(self):
+ return Connection()
+
+ @connection.setter # Prepares stored value
+ def connection(self, value):
+ if value is None:
+ raise TypeError("Connection must be a connection")
+ return value
+
+ @connection.deleter
+ def connection(self, value):
+ # Additional action to do at del(self.attr)
+ if value is not None:
+ print("Connection %r deleted" % (value, ))
+ """
+
+ def __init__(self, fget=None, fset=None, fdel=None, doc=None):
+ self.__get = fget
+ self.__set = fset
+ self.__del = fdel
+ self.__doc__ = doc or fget.__doc__
+ self.__name__ = fget.__name__
+ self.__module__ = fget.__module__
+
+ def __get__(self, obj, type=None):
+ if obj is None:
+ return self
+ try:
+ return obj.__dict__[self.__name__]
+ except KeyError:
+ value = obj.__dict__[self.__name__] = self.__get(obj)
+ return value
+
+ def __set__(self, obj, value):
+ if obj is None:
+ return self
+ if self.__set is not None:
+ value = self.__set(obj, value)
+ obj.__dict__[self.__name__] = value
+
+ def __delete__(self, obj):
+ if obj is None:
+ return self
+ try:
+ value = obj.__dict__.pop(self.__name__)
+ except KeyError:
+ pass
+ else:
+ if self.__del is not None:
+ self.__del(obj, value)
+
+ def setter(self, fset):
+ return self.__class__(self.__get, fset, self.__del)
+
+ def deleter(self, fdel):
+ return self.__class__(self.__get, self.__set, fdel)
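
Beyond the docstring above, the behaviour boils down to: the first access stores the computed value in the instance's __dict__, later accesses return that cached value, and del clears it so the next access recomputes. A small illustration, not part of this commit:

from compressor.utils import cached_property

class Report(object):
    @cached_property
    def total(self):
        print "computing total..."
        return 42

r = Report()
r.total      # prints "computing total...", returns 42 and caches it in r.__dict__
r.total      # served from the cache, the getter does not run again
del r.total  # drops the cached value; the next access recomputes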
2 docs/conf.py
@@ -50,7 +50,7 @@
# The short X.Y version.
version = '0.6'
# The full version, including alpha/beta/rc tags.
-release = '0.6b1'
+release = '0.6b2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
9 docs/index.txt
@@ -182,6 +182,13 @@ COMPRESS_STORAGE
The dotted path to a Django Storage backend to be used to save the
compressed files.
+``compressor`` ships with one additional storage backend:
+
+* ``'compressor.storage.GzipCompressorFileStorage'``
+
+ A subclass of the default storage backend, which will additionally
+ create ``*.gz`` files of each of the compressed files.
+
COMPRESS_PARSER
^^^^^^^^^^^^^^^
@@ -222,7 +229,7 @@ no file changes are detected.
This is also used by the ``compress`` management command which pre-compresses
the contents of ``{% compress %}`` template tags in the cache.
-COMPRESS_MINT_DELAY``
+COMPRESS_MINT_DELAY
^^^^^^^^^^^^^^^^^^^
:Default: ``30`` (seconds)
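
Putting the documentation change and the new storage backend together, enabling the gzipping backend is a one-line settings change; a sketch, not part of this commit:

# settings.py -- save a .gz copy next to each compressed file; url() points at the .gz path.
COMPRESS_STORAGE = 'compressor.storage.GzipCompressorFileStorage'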
