Merge branch 'release/0.6b6'

commit a05728462dd601c0d7973c98dc6396c48fdfbe77 (2 parents: b052519 + 45d7c87)
authored by @jezdez
Showing with 989 additions and 539 deletions.
  1. +3 −1 .gitignore
  2. +1 −0  AUTHORS
  3. +35 −0 LICENSE
  4. +2 −2 compressor/__init__.py
  5. +112 −57 compressor/base.py
  6. +12 −3 compressor/cache.py
  7. +14 −13 compressor/css.py
  8. +9 −0 compressor/exceptions.py
  9. +1 −14 compressor/filters/__init__.py
  10. +85 −0 compressor/filters/base.py
  11. +7 −28 compressor/filters/closure.py
  12. +34 −34 compressor/filters/cssmin/cssmin.py
  13. +7 −28 compressor/filters/csstidy.py
  14. +5 −2 compressor/filters/datauri.py
  15. +0 −31 compressor/filters/less.py
  16. +17 −37 compressor/filters/yui.py
  17. +1 −0  compressor/finders.py
  18. +4 −3 compressor/js.py
  19. +25 −19 compressor/management/commands/compress.py
  20. +9 −3 compressor/parser.py
  21. +29 −19 compressor/settings.py
  22. +13 −0 compressor/storage.py
  23. +1 −1  compressor/templates/compressor/css.html
  24. +1 −0  compressor/templates/compressor/css_file.html
  25. +1 −1  compressor/templates/compressor/js.html
  26. +1 −0  compressor/templates/compressor/js_file.html
  27. +10 −9 compressor/templatetags/compress.py
  28. +0 −1  compressor/tests/media/custom/js/3f33b9146e12.js
  29. +0 −1  compressor/tests/media/custom/nested/js/3f33b9146e12.js
  30. +0 −1  compressor/tests/media/js/3f33b9146e12.js
  31. +10 −6 compressor/tests/tests.py
  32. +441 −171 compressor/utils.py
  33. +1 −1  docs/conf.py
  34. +86 −0 docs/index.txt
  35. +12 −53 tox.ini
4 .gitignore
@@ -1,5 +1,7 @@
build
-*CACHE*
+compressor/tests/media/CACHE
+compressor/tests/media/custom
+compressor/tests/media/js/3f33b9146e12.js
dist
MANIFEST
*.pyc
1  AUTHORS
@@ -14,6 +14,7 @@ Benjamin Wohlwend
Brad Whittington
Chris Adams
David Ziegler
+Eugene Mirotin
Gert Van Gool
Justin Lilly
Maciek Szczesniak
35 LICENSE
@@ -105,3 +105,38 @@ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
+
+utils.FormattableString
+-----------------------
+Copyright (c) 2010 by Florent Xicluna.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms of the software as well
+as documentation, with or without modification, are permitted provided
+that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+* The names of the contributors may not be used to endorse or
+ promote products derived from this software without specific
+ prior written permission.
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
+NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
4 compressor/__init__.py
@@ -1,4 +1,4 @@
-VERSION = (0, 6, 0, "b", 5) # following PEP 386
+VERSION = (0, 6, 0, "b", 6) # following PEP 386
DEV_N = None
@@ -13,4 +13,4 @@ def get_version():
return version
-__version__ = get_version()
+__version__ = get_version()
169 compressor/base.py
@@ -1,29 +1,41 @@
+import fnmatch
import os
import socket
from itertools import chain
-from django.template.loader import render_to_string
from django.core.files.base import ContentFile
+from django.core.exceptions import ImproperlyConfigured
+from django.template.loader import render_to_string
from compressor.cache import get_hexdigest, get_mtime
from compressor.conf import settings
-from compressor.exceptions import UncompressableFileError
+from compressor.exceptions import CompressorError, UncompressableFileError
+from compressor.filters import CompilerFilter
from compressor.storage import default_storage
from compressor.utils import get_class, cached_property
+
class Compressor(object):
+ """
+ Base compressor object to be subclassed for content type
+    dependent implementation details.
+ """
+ type = None
def __init__(self, content=None, output_prefix="compressed"):
self.content = content or ""
- self.extra_context = {}
- self.type = None
self.output_prefix = output_prefix
- self.split_content = []
self.charset = settings.DEFAULT_CHARSET
+ self.storage = default_storage
+ self.split_content = []
+ self.extra_context = {}
def split_contents(self):
- raise NotImplementedError(
- "split_contents must be defined in a subclass")
+ """
+ To be implemented in a subclass, should return an
+ iterable with three values: kind, value, element
+ """
+ raise NotImplementedError
def get_filename(self, url):
try:
@@ -50,47 +62,62 @@ def cached_filters(self):
@cached_property
def mtimes(self):
- for kind, value, elem in self.split_contents():
- if kind == 'file':
- yield str(get_mtime(value))
+ return [str(get_mtime(value))
+ for kind, value, _ in self.split_contents() if kind == 'file']
@cached_property
def cachekey(self):
- cachestr = "".join(
- chain([self.content], self.mtimes)).encode(self.charset)
- return "django_compressor.%s.%s" % (socket.gethostname(),
- get_hexdigest(cachestr)[:12])
-
- @cached_property
- def storage(self):
- return default_storage
+ key = get_hexdigest(''.join(
+ [self.content] + self.mtimes).encode(self.charset), 12)
+ return "django_compressor.%s.%s" % (socket.gethostname(), key)
@cached_property
def hunks(self):
for kind, value, elem in self.split_contents():
- attribs = self.parser.elem_attribs(elem)
if kind == "hunk":
# Let's cast BeautifulSoup element to unicode here since
# it will try to encode using ascii internally later
- yield unicode(self.filter(value, "input", elem=elem))
+ yield unicode(self.filter(
+ value, method="input", elem=elem, kind=kind))
elif kind == "file":
content = ""
+ fd = open(value, 'rb')
try:
- fd = open(value, 'rb')
- try:
- content = fd.read()
- finally:
- fd.close()
+ content = fd.read()
except IOError, e:
raise UncompressableFileError(
"IOError while processing '%s': %s" % (value, e))
- content = self.filter(content, "input", filename=value, elem=elem)
- yield unicode(content, attribs.get("charset", self.charset))
+ finally:
+ fd.close()
+ content = self.filter(content,
+ method="input", filename=value, elem=elem, kind=kind)
+ attribs = self.parser.elem_attribs(elem)
+ charset = attribs.get("charset", self.charset)
+ yield unicode(content, charset)
+ @cached_property
def concat(self):
- return "\n".join((hunk.encode(self.charset) for hunk in self.hunks))
+ return '\n'.join((hunk.encode(self.charset) for hunk in self.hunks))
+
+ def precompile(self, content, kind=None, elem=None, filename=None, **kwargs):
+ if not kind:
+ return content
+ attrs = self.parser.elem_attribs(elem)
+ mimetype = attrs.get("type", None)
+ if mimetype is not None:
+ for mimetypes, command in settings.COMPRESS_PRECOMPILERS:
+ if not isinstance(mimetypes, (list, tuple)):
+ mimetypes = (mimetypes,)
+ if mimetype in mimetypes:
+ content = CompilerFilter(content, filter_type=self.type,
+ command=command).output(**kwargs)
+ return content
def filter(self, content, method, **kwargs):
+ # run compiler
+ if method == "input":
+ content = self.precompile(content, **kwargs)
+
for filter_cls in self.cached_filters:
filter_func = getattr(
filter_cls(content, filter_type=self.type), method)
@@ -103,37 +130,65 @@ def filter(self, content, method, **kwargs):
@cached_property
def combined(self):
- return self.filter(self.concat(), 'output')
+ return self.filter(self.concat, method="output")
- @cached_property
- def hash(self):
- return get_hexdigest(self.combined)[:12]
+ def hash(self, content):
+ return get_hexdigest(content)[:12]
- @cached_property
- def new_filepath(self):
+ def filepath(self, content):
return os.path.join(settings.COMPRESS_OUTPUT_DIR.strip(os.sep),
- self.output_prefix, "%s.%s" % (self.hash, self.type))
-
- def save_file(self):
- if self.storage.exists(self.new_filepath):
- return False
- self.storage.save(self.new_filepath, ContentFile(self.combined))
- return True
-
- def output(self, forced=False):
- if not settings.COMPRESS_ENABLED and not forced:
- return self.content
- context = {
- "saved": self.save_file(),
- "url": self.storage.url(self.new_filepath),
- }
- context.update(self.extra_context)
- return render_to_string(self.template_name, context)
-
- def output_inline(self):
- if settings.COMPRESS_ENABLED:
+ self.output_prefix, "%s.%s" % (self.hash(content), self.type))
+
+ def output(self, mode='file', forced=False):
+ """
+ The general output method, override in subclass if you need to do
+ any custom modification. Calls other mode specific methods or simply
+ returns the content directly.
+ """
+ # First check whether we should do the full compression,
+ # including precompilation (or if it's forced)
+ if settings.COMPRESS_ENABLED or forced:
content = self.combined
+ elif settings.COMPRESS_PRECOMPILERS:
+ # or concatting it, if pre-compilation is enabled
+ content = self.concat
else:
- content = self.concat()
- context = dict(content=content, **self.extra_context)
- return render_to_string(self.template_name_inline, context)
+ # or just doing nothing, when neither
+ # compression nor compilation is enabled
+ return self.content
+ # Then check for the appropriate output method and call it
+ output_func = getattr(self, "output_%s" % mode, None)
+ if callable(output_func):
+ return output_func(mode, content)
+ # Total failure, raise a general exception
+ raise CompressorError(
+ "Couldn't find output method for mode '%s'" % mode)
+
+ def output_file(self, mode, content):
+ """
+ The output method that saves the content to a file and renders
+ the appropriate template with the file's URL.
+ """
+ new_filepath = self.filepath(content)
+ if not self.storage.exists(new_filepath):
+ self.storage.save(new_filepath, ContentFile(content))
+ url = self.storage.url(new_filepath)
+ return self.render_output(mode, {"url": url})
+
+ def output_inline(self, mode, content):
+ """
+ The output method that directly returns the content for inline
+ display.
+ """
+ return self.render_output(mode, {"content": content})
+
+ def render_output(self, mode, context=None):
+ """
+ Renders the compressor output with the appropriate template for
+ the given mode and template context.
+ """
+ if context is None:
+ context = {}
+ context.update(self.extra_context)
+ return render_to_string(
+ "compressor/%s_%s.html" % (self.type, mode), context)
15 compressor/cache.py
@@ -7,18 +7,25 @@
from compressor.conf import settings
-def get_hexdigest(plaintext):
- return sha_constructor(plaintext).hexdigest()
+
+def get_hexdigest(plaintext, length=None):
+ digest = sha_constructor(plaintext).hexdigest()
+ if length:
+ return digest[:length]
+ return digest
+
def get_mtime_cachekey(filename):
return "django_compressor.mtime.%s.%s" % (socket.gethostname(),
get_hexdigest(filename))
+
def get_offline_cachekey(source):
return ("django_compressor.offline.%s.%s" %
(socket.gethostname(),
get_hexdigest("".join(smart_str(s) for s in source))))
+
def get_mtime(filename):
if settings.COMPRESS_MTIME_DELAY:
key = get_mtime_cachekey(filename)
@@ -29,9 +36,11 @@ def get_mtime(filename):
return mtime
return os.path.getmtime(filename)
+
def get_hashed_mtime(filename, length=12):
filename = os.path.realpath(filename)
mtime = str(int(get_mtime(filename)))
- return get_hexdigest(mtime)[:length]
+ return get_hexdigest(mtime, length)
+
cache = get_cache(settings.COMPRESS_CACHE_BACKEND)
27 compressor/css.py
@@ -2,12 +2,13 @@
from compressor.base import Compressor
from compressor.exceptions import UncompressableFileError
+
class CssCompressor(Compressor):
+ template_name = "compressor/css.html"
+ template_name_inline = "compressor/css_inline.html"
def __init__(self, content=None, output_prefix="css"):
super(CssCompressor, self).__init__(content, output_prefix)
- self.template_name = "compressor/css.html"
- self.template_name_inline = "compressor/css_inline.html"
self.filters = list(settings.COMPRESS_CSS_FILTERS)
self.type = 'css'
@@ -21,8 +22,8 @@ def split_contents(self):
elem_attribs = self.parser.elem_attribs(elem)
if elem_name == 'link' and elem_attribs['rel'] == 'stylesheet':
try:
- content = self.parser.elem_content(elem)
- data = ('file', self.get_filename(elem_attribs['href']), elem)
+ data = (
+ 'file', self.get_filename(elem_attribs['href']), elem)
except UncompressableFileError:
if settings.DEBUG:
raise
@@ -41,14 +42,14 @@ def split_contents(self):
self.media_nodes.append((media, node))
return self.split_content
- def output(self, forced=False):
+ def output(self, *args, **kwargs):
self.split_contents()
if not hasattr(self, 'media_nodes'):
- return super(CssCompressor, self).output(forced=forced)
- if not settings.COMPRESS_ENABLED and not forced:
- return self.content
- ret = []
- for media, subnode in self.media_nodes:
- subnode.extra_context.update({'media': media})
- ret.append(subnode.output(forced=forced))
- return "".join(ret)
+ return super(CssCompressor, self).output(*args, **kwargs)
+ if settings.COMPRESS_ENABLED or kwargs.get('forced', False):
+ ret = []
+ for media, subnode in self.media_nodes:
+ subnode.extra_context.update({'media': media})
+ ret.append(subnode.output(*args, **kwargs))
+ return "".join(ret)
+ return self.content
9 compressor/exceptions.py
@@ -1,21 +1,30 @@
+class CompressorError(Exception):
+ """
+ A general error of the compressor
+ """
+ pass
+
class UncompressableFileError(Exception):
"""
This exception is raised when a file cannot be compressed
"""
pass
+
class FilterError(Exception):
"""
This exception is raised when a filter fails
"""
pass
+
class ParserError(Exception):
"""
This exception is raised when the parser fails
"""
pass
+
class OfflineGenerationError(Exception):
"""
Offline compression generation related exceptions
15 compressor/filters/__init__.py
@@ -1,14 +1 @@
-from compressor.conf import settings
-from compressor.exceptions import FilterError
-
-class FilterBase(object):
- def __init__(self, content, filter_type=None, verbose=0):
- self.type = filter_type
- self.content = content
- self.verbose = verbose or settings.COMPRESS_VERBOSE
-
- def input(self, **kwargs):
- raise NotImplementedError
-
- def output(self, **kwargs):
- raise NotImplementedError
+from compressor.filters.base import FilterBase, CompilerFilter, FilterError
85 compressor/filters/base.py
@@ -0,0 +1,85 @@
+import os
+import logging
+import subprocess
+import tempfile
+
+from compressor.conf import settings
+from compressor.exceptions import FilterError
+from compressor.utils import cmd_split, FormattableString
+
+logger = logging.getLogger("compressor.filters")
+
+
+class FilterBase(object):
+
+ def __init__(self, content, filter_type=None, verbose=0):
+ self.type = filter_type
+ self.content = content
+ self.verbose = verbose or settings.COMPRESS_VERBOSE
+ self.logger = logger
+
+ def input(self, **kwargs):
+ raise NotImplementedError
+
+ def output(self, **kwargs):
+ raise NotImplementedError
+
+
+class CompilerFilter(FilterBase):
+ """
+ A filter subclass that is able to filter content via
+ external commands.
+ """
+ command = None
+
+ def __init__(self, content, filter_type=None, verbose=0, command=None):
+ super(CompilerFilter, self).__init__(content, filter_type, verbose)
+ if command:
+ self.command = command
+ if self.command is None:
+ raise FilterError("Required command attribute not set")
+ self.stdout = subprocess.PIPE
+ self.stdin = subprocess.PIPE
+ self.stderr = subprocess.PIPE
+
+ def output(self, **kwargs):
+ infile = None
+ outfile = None
+ options = {}
+ try:
+ if "{infile}" in self.command:
+ infile = tempfile.NamedTemporaryFile(mode='w')
+ infile.write(self.content)
+ infile.flush()
+ options["infile"] = infile.name
+ if "{outfile}" in self.command:
+ ext = ".%s" % self.type and self.type or ""
+ outfile = tempfile.NamedTemporaryFile(mode='w', suffix=ext)
+ options["outfile"] = outfile.name
+ cmd = FormattableString(self.command).format(**options)
+ proc = subprocess.Popen(cmd_split(cmd),
+ stdout=self.stdout, stdin=self.stdin, stderr=self.stderr)
+ if infile is not None:
+ filtered, err = proc.communicate()
+ else:
+ filtered, err = proc.communicate(self.content)
+ except (IOError, OSError), e:
+ raise FilterError('Unable to apply %s (%r): %s' % (
+ self.__class__.__name__, self.command, e))
+ finally:
+ if infile:
+ infile.close()
+ if proc.wait() != 0:
+ if not err:
+ err = 'Unable to apply %s (%s)' % (
+ self.__class__.__name__, self.command)
+ raise FilterError(err)
+ if self.verbose:
+ self.logger.debug(err)
+ if outfile is not None:
+ try:
+ outfile_obj = open(outfile.name)
+ filtered = outfile_obj.read()
+ finally:
+ outfile_obj.close()
+ return filtered
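With CompilerFilter in place, the previously hand-rolled filters (and the removed less.py further below) shrink to a command template. A sketch of what a lessc-based filter could look like on top of it (class name and binary are illustrative, not part of this commit):

    from compressor.filters import CompilerFilter

    class LessFilter(CompilerFilter):
        # {infile} and {outfile} are replaced with temporary file names by
        # CompilerFilter.output(); a command without them gets the content
        # piped to stdin and the result read back from stdout.
        command = "lessc {infile} {outfile}"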
35 compressor/filters/closure.py
@@ -1,31 +1,10 @@
-from subprocess import Popen, PIPE
-
from compressor.conf import settings
-from compressor.filters import FilterBase, FilterError
-from compressor.utils import cmd_split
-
-
-class ClosureCompilerFilter(FilterBase):
-
- def output(self, **kwargs):
- arguments = settings.COMPRESS_CLOSURE_COMPILER_ARGUMENTS
-
- command = '%s %s' % (settings.COMPRESS_CLOSURE_COMPILER_BINARY, arguments)
-
- try:
- p = Popen(cmd_split(command), stdout=PIPE, stdin=PIPE, stderr=PIPE)
- filtered, err = p.communicate(self.content)
-
- except IOError, e:
- raise FilterError(e)
-
- if p.wait() != 0:
- if not err:
- err = 'Unable to apply Closure Compiler filter'
- raise FilterError(err)
-
- if self.verbose:
- print err
+from compressor.filters import CompilerFilter
- return filtered
+class ClosureCompilerFilter(CompilerFilter):
+ command = "%(binary)s %(args)s"
+ options = {
+ "binary": settings.COMPRESS_CLOSURE_COMPILER_ARGUMENTS,
+ "args": settings.COMPRESS_CLOSURE_COMPILER_ARGUMENTS,
+ }
68 compressor/filters/cssmin/cssmin.py
@@ -2,9 +2,9 @@
# -*- coding: utf-8 -*-
#
# `cssmin.py` - A Python port of the YUI CSS compressor.
-#
+#
# Copyright (c) 2010 Zachary Voase
-#
+#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
@@ -13,10 +13,10 @@
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
-#
+#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
-#
+#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
@@ -25,7 +25,7 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
-#
+#
"""`cssmin` - A Python port of the YUI CSS compressor."""
@@ -41,7 +41,7 @@
def remove_comments(css):
"""Remove all CSS comment blocks."""
-
+
iemac = False
preserve = False
comment_start = css.find("/*")
@@ -49,7 +49,7 @@ def remove_comments(css):
# Preserve comments that look like `/*!...*/`.
# Slicing is used to make sure we don"t get an IndexError.
preserve = css[comment_start + 2:comment_start + 3] == "!"
-
+
comment_end = css.find("*/", comment_start + 2)
if comment_end < 0:
if not preserve:
@@ -69,22 +69,22 @@ def remove_comments(css):
else:
comment_start = comment_end + 2
comment_start = css.find("/*", comment_start)
-
+
return css
def remove_unnecessary_whitespace(css):
"""Remove unnecessary whitespace characters."""
-
+
def pseudoclasscolon(css):
-
+
"""
Prevents 'p :link' from becoming 'p:link'.
-
+
Translates 'p :link' into 'p ___PSEUDOCLASSCOLON___link'; this is
translated back again later.
"""
-
+
regex = re.compile(r"(^|\})(([^\{\:])+\:)+([^\{]*\{)")
match = regex.search(css)
while match:
@@ -94,43 +94,43 @@ def pseudoclasscolon(css):
css[match.end():]])
match = regex.search(css)
return css
-
+
css = pseudoclasscolon(css)
# Remove spaces from before things.
css = re.sub(r"\s+([!{};:>+\(\)\],])", r"\1", css)
-
+
# If there is a `@charset`, then only allow one, and move to the beginning.
css = re.sub(r"^(.*)(@charset \"[^\"]*\";)", r"\2\1", css)
css = re.sub(r"^(\s*@charset [^;]+;\s*)+", r"\1", css)
-
+
# Put the space back in for a few cases, such as `@media screen` and
# `(-webkit-min-device-pixel-ratio:0)`.
css = re.sub(r"\band\(", "and (", css)
-
+
# Put the colons back.
css = css.replace('___PSEUDOCLASSCOLON___', ':')
-
+
# Remove spaces from after things.
css = re.sub(r"([!{}:;>+\(\[,])\s+", r"\1", css)
-
+
return css
def remove_unnecessary_semicolons(css):
"""Remove unnecessary semicolons."""
-
+
return re.sub(r";+\}", "}", css)
def remove_empty_rules(css):
"""Remove empty rules."""
-
+
return re.sub(r"[^\}\{]+\{\}", "", css)
def normalize_rgb_colors_to_hex(css):
"""Convert `rgb(51,102,153)` to `#336699`."""
-
+
regex = re.compile(r"rgb\s*\(\s*([0-9,\s]+)\s*\)")
match = regex.search(css)
while match:
@@ -143,32 +143,32 @@ def normalize_rgb_colors_to_hex(css):
def condense_zero_units(css):
"""Replace `0(px, em, %, etc)` with `0`."""
-
+
return re.sub(r"([\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)", r"\1\2", css)
def condense_multidimensional_zeros(css):
"""Replace `:0 0 0 0;`, `:0 0 0;` etc. with `:0;`."""
-
+
css = css.replace(":0 0 0 0;", ":0;")
css = css.replace(":0 0 0;", ":0;")
css = css.replace(":0 0;", ":0;")
-
+
# Revert `background-position:0;` to the valid `background-position:0 0;`.
css = css.replace("background-position:0;", "background-position:0 0;")
-
+
return css
def condense_floating_points(css):
"""Replace `0.6` with `.6` where possible."""
-
+
return re.sub(r"(:|\s)0+\.(\d+)", r"\1.\2", css)
def condense_hex_colors(css):
"""Shorten colors from #AABBCC to #ABC where possible."""
-
+
regex = re.compile(r"([^\"'=\s])(\s*)#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])")
match = regex.search(css)
while match:
@@ -184,19 +184,19 @@ def condense_hex_colors(css):
def condense_whitespace(css):
"""Condense multiple adjacent whitespace characters into one."""
-
+
return re.sub(r"\s+", " ", css)
def condense_semicolons(css):
"""Condense multiple adjacent semicolon characters into one."""
-
+
return re.sub(r";;+", ";", css)
def wrap_css_lines(css, line_length):
"""Wrap the lines of the given CSS to an approximate length."""
-
+
lines = []
line_start = 0
for i, char in enumerate(css):
@@ -204,7 +204,7 @@ def wrap_css_lines(css, line_length):
if char == '}' and (i - line_start >= line_length):
lines.append(css[line_start:i + 1])
line_start = i + 1
-
+
if line_start < len(css):
lines.append(css[line_start:])
return '\n'.join(lines)
@@ -233,16 +233,16 @@ def cssmin(css, wrap=None):
def main():
import optparse
import sys
-
+
p = optparse.OptionParser(
prog="cssmin", version=__version__,
usage="%prog [--wrap N]",
description="""Reads raw CSS from stdin, and writes compressed CSS to stdout.""")
-
+
p.add_option(
'-w', '--wrap', type='int', default=None, metavar='N',
help="Wrap output to approximately N chars per line.")
-
+
options, args = p.parse_args()
sys.stdout.write(cssmin(sys.stdin.read(), wrap=options.wrap))
35 compressor/filters/csstidy.py
@@ -1,31 +1,10 @@
-from subprocess import Popen, PIPE
-import tempfile
-import warnings
-
from compressor.conf import settings
-from compressor.filters import FilterBase
-
-warnings.simplefilter('ignore', RuntimeWarning)
-
-class CSSTidyFilter(FilterBase):
-
- def output(self, **kwargs):
- tmp_file = tempfile.NamedTemporaryFile(mode='w+b')
- tmp_file.write(self.content)
- tmp_file.flush()
-
- output_file = tempfile.NamedTemporaryFile(mode='w+b')
-
- command = '%s %s %s %s' % (settings.COMPRESS_CSSTIDY_BINARY, tmp_file.name, settings.COMPRESS_CSSTIDY_ARGUMENTS, output_file.name)
-
- command_output = Popen(command, shell=True,
- stdout=PIPE, stdin=PIPE, stderr=PIPE).communicate()
-
- filtered_css = output_file.read()
- output_file.close()
- tmp_file.close()
+from compressor.filters import CompilerFilter
- if self.verbose:
- print command_output
- return filtered_css
+class CSSTidyFilter(CompilerFilter):
+ command = "%(binary)s %(infile)s %(args)s %(outfile)s"
+ options = {
+ "binary": settings.COMPRESS_CSSTIDY_BINARY,
+ "args": settings.COMPRESS_CSSTIDY_ARGUMENTS,
+ }
7 compressor/filters/datauri.py
@@ -6,6 +6,7 @@
from compressor.conf import settings
from compressor.filters import FilterBase
+
class DataUriFilter(FilterBase):
"""Filter for embedding media as data: URIs.
@@ -28,7 +29,8 @@ def get_file_path(self, url):
# strip query string of file paths
if "?" in url:
url = url.split("?")[0]
- return os.path.join(settings.COMPRESS_ROOT, url[len(settings.COMPRESS_URL):])
+ return os.path.join(
+ settings.COMPRESS_ROOT, url[len(settings.COMPRESS_URL):])
def data_uri_converter(self, matchobj):
url = matchobj.group(1).strip(' \'"')
@@ -36,7 +38,8 @@ def data_uri_converter(self, matchobj):
path = self.get_file_path(url)
if os.stat(path).st_size <= settings.COMPRESS_DATA_URI_MIN_SIZE:
data = b64encode(open(path, 'rb').read())
- return 'url("data:%s;base64,%s")' % (mimetypes.guess_type(path)[0], data)
+ return 'url("data:%s;base64,%s")' % (
+ mimetypes.guess_type(path)[0], data)
return 'url("%s")' % url
31 compressor/filters/less.py
@@ -1,31 +0,0 @@
-import os
-import warnings
-import tempfile
-
-from compressor.conf import settings
-from compressor.filters import FilterBase
-
-warnings.simplefilter('ignore', RuntimeWarning)
-
-class LessFilter(FilterBase):
-
- def output(self, **kwargs):
-
- tmp_file = tempfile.NamedTemporaryFile(mode='w+b')
- tmp_file.write(self.content)
- tmp_file.flush()
-
- output_file = tempfile.NamedTemporaryFile(mode='w+b')
-
- command = '%s %s %s' % (settings.COMPRESS_LESSC_BINARY, tmp_file.name, output_file.name)
-
- command_output = os.popen(command).read()
-
- filtered_css = output_file.read()
- output_file.close()
- tmp_file.close()
-
- if self.verbose:
- print command_output
-
- return filtered_css
54 compressor/filters/yui.py
@@ -1,48 +1,28 @@
-from subprocess import Popen, PIPE
-
from compressor.conf import settings
-from compressor.filters import FilterBase, FilterError
-from compressor.utils import cmd_split
-
-
-class YUICompressorFilter(FilterBase):
+from compressor.filters import CompilerFilter
- def output(self, **kwargs):
- arguments = ''
- if self.type == 'js':
- arguments = settings.COMPRESS_YUI_JS_ARGUMENTS
- elif self.type == 'css':
- arguments = settings.COMPRESS_YUI_CSS_ARGUMENTS
-
- command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, self.type, arguments)
-
- if self.verbose:
- command += ' --verbose'
- try:
- p = Popen(cmd_split(command), stdin=PIPE, stdout=PIPE, stderr=PIPE)
- filtered, err = p.communicate(self.content)
- except IOError, e:
- raise FilterError(e)
-
- if p.wait() != 0:
- if not err:
- err = 'Unable to apply YUI Compressor filter'
- raise FilterError(err)
+class YUICompressorFilter(CompilerFilter):
+ command = "%(binary)s %(args)s"
+ def __init__(self, *args, **kwargs):
+ super(YUICompressorFilter, self).__init__(*args, **kwargs)
+ self.command += ' --type=%s' % self.type
if self.verbose:
- print err
-
- return filtered
+ self.command += ' --verbose'
class YUICSSFilter(YUICompressorFilter):
- def __init__(self, *args, **kwargs):
- super(YUICSSFilter, self).__init__(*args, **kwargs)
- self.type = 'css'
+ type = 'css'
+ options = {
+ "binary": settings.COMPRESS_YUI_BINARY,
+ "args": settings.COMPRESS_YUI_CSS_ARGUMENTS,
+ }
class YUIJSFilter(YUICompressorFilter):
- def __init__(self, *args, **kwargs):
- super(YUIJSFilter, self).__init__(*args, **kwargs)
- self.type = 'js'
+ type = 'js'
+ options = {
+ "binary": settings.COMPRESS_YUI_BINARY,
+ "args": settings.COMPRESS_YUI_CSS_ARGUMENTS,
+ }
1  compressor/finders.py
@@ -13,6 +13,7 @@
"standalone version django-staticfiles needs "
"to be installed.")
+
class CompressorFinder(BaseStorageFinder):
"""
A staticfiles finder that looks in COMPRESS_ROOT
7 compressor/js.py
@@ -4,11 +4,11 @@
class JsCompressor(Compressor):
+ template_name = "compressor/js.html"
+ template_name_inline = "compressor/js_inline.html"
def __init__(self, content=None, output_prefix="js"):
super(JsCompressor, self).__init__(content, output_prefix)
- self.template_name = "compressor/js.html"
- self.template_name_inline = "compressor/js_inline.html"
self.filters = list(settings.COMPRESS_JS_FILTERS)
self.type = 'js'
@@ -19,7 +19,8 @@ def split_contents(self):
attribs = self.parser.elem_attribs(elem)
if 'src' in attribs:
try:
- self.split_content.append(('file', self.get_filename(attribs['src']), elem))
+ self.split_content.append(
+ ('file', self.get_filename(attribs['src']), elem))
except UncompressableFileError:
if settings.DEBUG:
raise
44 compressor/management/commands/compress.py
@@ -21,28 +21,30 @@
class Command(NoArgsCommand):
- help = "Generate the compressor content outside of the request/response cycle"
+ help = "Compress content outside of the request/response cycle"
option_list = NoArgsCommand.option_list + (
make_option('--extension', '-e', action='append', dest='extensions',
help='The file extension(s) to examine (default: ".html", '
'separate multiple extensions with commas, or use -e '
'multiple times)'),
- make_option('-f', '--force', default=False, action='store_true', dest='force',
+ make_option('-f', '--force', default=False, action='store_true',
help="Force generation of compressor content even if "
- "COMPRESS setting is not True."),
- make_option('--follow-links', default=False, action='store_true', dest='follow_links',
+ "COMPRESS setting is not True.", dest='force'),
+ make_option('--follow-links', default=False, action='store_true',
help="Follow symlinks when traversing the COMPRESS_ROOT "
"(which defaults to MEDIA_ROOT). Be aware that using this "
"can lead to infinite recursion if a link points to a parent "
- "directory of itself."),
+ "directory of itself.", dest='follow_links'),
)
def get_loaders(self):
from django.template.loader import template_source_loaders
if template_source_loaders is None:
try:
- from django.template.loader import find_template as finder_func
+ from django.template.loader import (
+ find_template as finder_func)
except ImportError:
- from django.template.loader import find_template_source as finder_func
+ from django.template.loader import (
+ find_template_source as finder_func)
try:
source, name = finder_func('test')
except TemplateDoesNotExist:
@@ -71,7 +73,8 @@ def compress(self, log=None, **options):
for loader in self.get_loaders():
try:
module = import_module(loader.__module__)
- get_template_sources = getattr(module, 'get_template_sources', None)
+ get_template_sources = getattr(module,
+ 'get_template_sources', None)
if get_template_sources is None:
get_template_sources = loader.get_template_sources
paths.update(list(get_template_sources('')))
@@ -89,7 +92,8 @@ def compress(self, log=None, **options):
log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
templates = set()
for path in paths:
- for root, dirs, files in walk(path, followlinks=options.get('followlinks', False)):
+ for root, dirs, files in walk(path,
+ followlinks=options.get('followlinks', False)):
templates.update(os.path.join(root, name)
for name in files if any(fnmatch(name, "*%s" % glob)
for glob in extensions))
@@ -126,7 +130,8 @@ def compress(self, log=None, **options):
compressor_nodes.setdefault(template_name, []).extend(nodes)
if not compressor_nodes:
- raise OfflineGenerationError("No 'compress' template tags found in templates.")
+ raise OfflineGenerationError(
+ "No 'compress' template tags found in templates.")
if verbosity > 0:
log.write("Found 'compress' tags in:\n\t" +
@@ -175,18 +180,19 @@ def handle_extensions(self, extensions=('html',)):
for i, ext in enumerate(ext_list):
if not ext.startswith('.'):
ext_list[i] = '.%s' % ext_list[i]
-
- # we don't want *.py files here because of the way non-*.py files
- # are handled in make_messages() (they are copied to file.ext.py files to
- # trick xgettext to parse them as Python files)
- return set([x for x in ext_list if x != '.py'])
+ return set(ext_list)
def handle_noargs(self, **options):
if not settings.COMPRESS_ENABLED and not options.get("force"):
- raise CommandError("Compressor is disabled. Set COMPRESS settting or use --force to override.")
+ raise CommandError(
+ "Compressor is disabled. Set COMPRESS "
+ "settting or use --force to override.")
if not settings.COMPRESS_OFFLINE:
if not options.get("force"):
- raise CommandError("Offline compressiong is disabled. Set COMPRESS_OFFLINE or use the --force to override.")
- warnings.warn("COMPRESS_OFFLINE is not set. Offline generated "
- "cache will not be used.")
+ raise CommandError(
+ "Offline compressiong is disabled. Set "
+ "COMPRESS_OFFLINE or use the --force to override.")
+ warnings.warn(
+ "COMPRESS_OFFLINE is not set to True. "
+ "Offline generated cache will not be used.")
self.compress(sys.stdout, **options)
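As a usage note for the command above: offline generation is normally run as "python manage.py compress", but it can also be triggered programmatically, for example from a deploy script. A small sketch (assumes a configured Django project; force=True corresponds to the --force flag):

    from django.core.management import call_command

    # Equivalent to "python manage.py compress --force" on the shell;
    # forces generation even when COMPRESS/COMPRESS_OFFLINE are not set.
    call_command("compress", force=True)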
12 compressor/parser.py
@@ -2,8 +2,11 @@
from compressor.exceptions import ParserError
-class ParserBase(object):
+class ParserBase(object):
+ """
+    Base parser to be subclassed when creating your own parser.
+ """
def __init__(self, content):
self.content = content
@@ -43,6 +46,7 @@ def elem_str(self, elem):
"""
raise NotImplementedError
+
class BeautifulSoupParser(ParserBase):
_soup = None
@@ -57,7 +61,7 @@ def soup(self):
return self._soup
def css_elems(self):
- return self.soup.findAll({'link' : True, 'style' : True})
+ return self.soup.findAll({'link': True, 'style': True})
def js_elems(self):
return self.soup.findAll('script')
@@ -74,6 +78,7 @@ def elem_name(self, elem):
def elem_str(self, elem):
return smart_unicode(elem)
+
class LxmlParser(ParserBase):
_tree = None
@@ -110,4 +115,5 @@ def elem_name(self, elem):
def elem_str(self, elem):
from lxml import etree
- return smart_unicode(etree.tostring(elem, method='html', encoding=unicode))
+ return smart_unicode(
+ etree.tostring(elem, method='html', encoding=unicode))
48 compressor/settings.py
@@ -1,5 +1,3 @@
-import os
-
from django import VERSION as DJANGO_VERSION
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
@@ -24,29 +22,34 @@ class CompressorSettings(AppSettings):
CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter']
JS_FILTERS = ['compressor.filters.jsmin.JSMinFilter']
-
- LESSC_BINARY = LESSC_BINARY = 'lessc'
+ PRECOMPILERS = (
+ # ('text/coffeescript', 'coffee --compile --stdio'),
+ # ('text/less', 'lessc {infile} {outfile}'),
+ # ('text/x-sass', 'sass {infile} {outfile}'),
+ # ('text/x-scss', 'sass --scss {infile} {outfile}'),
+ )
CLOSURE_COMPILER_BINARY = 'java -jar compiler.jar'
CLOSURE_COMPILER_ARGUMENTS = ''
CSSTIDY_BINARY = 'csstidy'
CSSTIDY_ARGUMENTS = '--template=highest'
YUI_BINARY = 'java -jar yuicompressor.jar'
YUI_CSS_ARGUMENTS = ''
- YUI_JS_ARGUMENTS = 'COMPRESS_YUI_JS_ARGUMENTS'
+ YUI_JS_ARGUMENTS = ''
DATA_URI_MIN_SIZE = 1024
+
# the cache backend to use
CACHE_BACKEND = None
# rebuilds the cache every 30 days if nothing has changed.
- REBUILD_TIMEOUT = 60 * 60 * 24 * 30 # 30 days
+ REBUILD_TIMEOUT = 60 * 60 * 24 * 30 # 30 days
# the upper bound on how long any compression should take to be generated
# (used against dog piling, should be a lot smaller than REBUILD_TIMEOUT
- MINT_DELAY = 30 # seconds
+ MINT_DELAY = 30 # seconds
# check for file changes only after a delay
- MTIME_DELAY = 10 # seconds
- # enables the offline cache -- a cache that is filled by the compress management command
+ MTIME_DELAY = 10 # seconds
+ # enables the offline cache -- also filled by the compress command
OFFLINE = False
# invalidates the offline cache after one year
- OFFLINE_TIMEOUT = 60 * 60 * 24 * 365 # 1 year
+ OFFLINE_TIMEOUT = 60 * 60 * 24 * 365 # 1 year
# The context to be used when compressing the files "offline"
OFFLINE_CONTEXT = {}
@@ -59,7 +62,8 @@ def configure_root(self, value):
if not value:
value = settings.MEDIA_ROOT
if not value:
- raise ImproperlyConfigured("The COMPRESS_ROOT setting must be set.")
+ raise ImproperlyConfigured(
+ "The COMPRESS_ROOT setting must be set.")
# In case staticfiles is used, make sure the FileSystemFinder is
# installed, and if it is, check if COMPRESS_ROOT is listed in
# STATICFILES_DIRS to allow finding compressed files
@@ -78,14 +82,14 @@ def configure_root(self, value):
return value
def configure_url(self, value):
- # Falls back to the 1.3 STATIC_URL setting by default or falls back to MEDIA_URL
+ # Uses Django 1.3's STATIC_URL by default or falls back to MEDIA_URL
if value is None:
- value = getattr(settings, 'STATIC_URL', None)
+ value = getattr(settings, "STATIC_URL", None)
if not value:
value = settings.MEDIA_URL
- if not value.endswith('/'):
- raise ImproperlyConfigured('The URL settings (e.g. COMPRESS_URL) '
- 'must have a trailing slash.')
+ if not value.endswith("/"):
+ raise ImproperlyConfigured("The URL settings (e.g. COMPRESS_URL) "
+ "must have a trailing slash.")
return value
def configure_cache_backend(self, value):
@@ -104,9 +108,15 @@ def configure_cache_backend(self, value):
def configure_offline_context(self, value):
if not value:
value = {
- 'MEDIA_URL': settings.MEDIA_URL,
+ "MEDIA_URL": settings.MEDIA_URL,
}
# Adds the 1.3 STATIC_URL setting to the context if available
- if getattr(settings, 'STATIC_URL', None):
- value['STATIC_URL'] = settings.STATIC_URL
+ if getattr(settings, "STATIC_URL", None):
+ value["STATIC_URL"] = settings.STATIC_URL
+ return value
+
+ def configure_precompilers(self, value):
+ if not isinstance(value, (list, tuple)):
+ raise ImproperlyConfigured("The COMPRESS_PRECOMPILERS setting "
+ "must be a list or tuple. Check for missing commas.")
return value
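The commented PRECOMPILERS defaults above map a mimetype (taken from the type attribute of the <style> or <script> tag) to a shell command. Enabling one of them in a project's settings.py could look like this (a sketch; it assumes the lessc binary is on the PATH):

    # settings.py of the project using django-compressor
    COMPRESS_ENABLED = True
    COMPRESS_PRECOMPILERS = (
        # {infile}/{outfile} are replaced with temporary file names,
        # see compressor.filters.base.CompilerFilter above.
        ('text/less', 'lessc {infile} {outfile}'),
    )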
13 compressor/storage.py
@@ -1,10 +1,13 @@
import gzip
+from os import path
+from datetime import datetime
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.utils.functional import LazyObject
from compressor.conf import settings
+
class CompressorFileStorage(FileSystemStorage):
"""
Standard file system storage for files handled by django-compressor.
@@ -21,6 +24,15 @@ def __init__(self, location=None, base_url=None, *args, **kwargs):
super(CompressorFileStorage, self).__init__(location, base_url,
*args, **kwargs)
+ def accessed_time(self, name):
+ return datetime.fromtimestamp(path.getatime(self.path(name)))
+
+ def created_time(self, name):
+ return datetime.fromtimestamp(path.getctime(self.path(name)))
+
+ def modified_time(self, name):
+ return datetime.fromtimestamp(path.getmtime(self.path(name)))
+
def get_available_name(self, name):
"""
Deletes the given file if it exists.
@@ -29,6 +41,7 @@ def get_available_name(self, name):
self.delete(name)
return name
+
class GzipCompressorFileStorage(CompressorFileStorage):
"""
The standard compressor file system storage that gzips storage files
2  compressor/templates/compressor/css.html
@@ -1 +1 @@
-<link rel="stylesheet" href="{{ url }}" type="text/css"{% if media %} media="{{ media }}"{% endif %}>
+{# left for backwards compatibility #}{% include "compressor/css_file.html" %}
1  compressor/templates/compressor/css_file.html
@@ -0,0 +1 @@
+<link rel="stylesheet" href="{{ url }}" type="text/css"{% if media %} media="{{ media }}"{% endif %}>
2  compressor/templates/compressor/js.html
@@ -1 +1 @@
-<script type="text/javascript" src="{{ url }}" charset="utf-8"></script>
+{# left for backwards compatibility #}{% include "compressor/js_file.html" %}
1  compressor/templates/compressor/js_file.html
@@ -0,0 +1 @@
+<script type="text/javascript" src="{{ url }}" charset="utf-8"></script>
19 compressor/templatetags/compress.py
@@ -33,11 +33,13 @@ def cache_get(self, key):
if (time.time() > refresh_time) and not refreshed:
# Store the stale value while the cache
# revalidates for another MINT_DELAY seconds.
- self.cache_set(key, val, timeout=settings.COMPRESS_MINT_DELAY, refreshed=True)
+ self.cache_set(key, val, refreshed=True,
+ timeout=settings.COMPRESS_MINT_DELAY)
return None
return val
- def cache_set(self, key, val, timeout=settings.COMPRESS_REBUILD_TIMEOUT, refreshed=False):
+ def cache_set(self, key, val, refreshed=False,
+ timeout=settings.COMPRESS_REBUILD_TIMEOUT):
refresh_time = timeout + time.time()
real_timeout = timeout + settings.COMPRESS_MINT_DELAY
packed_val = (val, refresh_time, refreshed)
@@ -47,22 +49,21 @@ def cache_key(self, compressor):
return "%s.%s.%s" % (compressor.cachekey, self.mode, self.kind)
def render(self, context, forced=False):
- if (settings.COMPRESS_ENABLED and settings.COMPRESS_OFFLINE) and not forced:
- key = get_offline_cachekey(self.nodelist)
- content = cache.get(key)
+ if (settings.COMPRESS_ENABLED and
+ settings.COMPRESS_OFFLINE) and not forced:
+ content = cache.get(get_offline_cachekey(self.nodelist))
if content:
return content
content = self.nodelist.render(context)
- if (not settings.COMPRESS_ENABLED or not len(content.strip())) and not forced:
+ if (not settings.COMPRESS_ENABLED or
+ not len(content.strip())) and not forced:
return content
compressor = self.compressor_cls(content)
cachekey = self.cache_key(compressor)
output = self.cache_get(cachekey)
if output is None or forced:
try:
- if self.mode == OUTPUT_INLINE:
- return compressor.output_inline()
- output = compressor.output(forced=forced)
+ output = compressor.output(self.mode, forced=forced)
self.cache_set(cachekey, output)
except:
if settings.DEBUG:
1  compressor/tests/media/custom/js/3f33b9146e12.js
@@ -1 +0,0 @@
-obj={};obj.value="value";
1  compressor/tests/media/custom/nested/js/3f33b9146e12.js
@@ -1 +0,0 @@
-obj={};obj.value="value";
1  compressor/tests/media/js/3f33b9146e12.js
@@ -1 +0,0 @@
-obj={};obj.value="value";
16 compressor/tests/tests.py
@@ -25,6 +25,7 @@ class CompressorTestCase(TestCase):
def setUp(self):
settings.COMPRESS_ENABLED = True
+ settings.PRECOMPILERS = {}
self.css = """
<link rel="stylesheet" href="/media/css/one.css" type="text/css" charset="utf-8">
<style type="text/css">p { border:5px solid green;}</style>
@@ -71,7 +72,7 @@ def test_cachekey(self):
self.assert_(is_cachekey.match(self.css_node.cachekey), "cachekey is returning something that doesn't look like r'django_compressor\.%s\.\w{12}'" % host_name)
def test_css_hash(self):
- self.assertEqual('f7c661b7a124', self.css_node.hash)
+ self.assertEqual('f7c661b7a124', self.css_node.hash(self.css_node.concat))
def test_css_return_if_on(self):
output = u'<link rel="stylesheet" href="/media/CACHE/css/f7c661b7a124.css" type="text/css">'
@@ -91,7 +92,7 @@ def test_js_hunks(self):
def test_js_concat(self):
out = u'obj = {};\nobj.value = "value";'
- self.assertEqual(out, self.js_node.concat())
+ self.assertEqual(out, self.js_node.concat)
def test_js_output(self):
out = u'obj={};obj.value="value";'
@@ -100,10 +101,13 @@ def test_js_output(self):
def test_js_return_if_off(self):
try:
enabled = settings.COMPRESS_ENABLED
+ precompilers = settings.COMPRESS_PRECOMPILERS
settings.COMPRESS_ENABLED = False
+ settings.COMPRESS_PRECOMPILERS = {}
self.assertEqual(self.js, self.js_node.output())
finally:
settings.COMPRESS_ENABLED = enabled
+ settings.COMPRESS_PRECOMPILERS = precompilers
def test_js_return_if_on(self):
output = u'<script type="text/javascript" src="/media/CACHE/js/3f33b9146e12.js" charset="utf-8"></script>'
@@ -384,10 +388,10 @@ def tearDown(self):
def test_offline(self):
count, result = CompressCommand().compress()
self.assertEqual(2, count)
- self.assertEqual(result, [
+ self.assertEqual([
u'<link rel="stylesheet" href="/media/CACHE/css/a55e1cf95000.css" type="text/css">\n',
u'<script type="text/javascript" src="/media/CACHE/js/bf53fa5b13e2.js" charset="utf-8"></script>',
- ])
+ ], result)
def test_offline_with_context(self):
self._old_offline_context = settings.COMPRESS_OFFLINE_CONTEXT
@@ -396,8 +400,8 @@ def test_offline_with_context(self):
}
count, result = CompressCommand().compress()
self.assertEqual(2, count)
- self.assertEqual(result, [
+ self.assertEqual([
u'<link rel="stylesheet" href="/media/CACHE/css/8a2405e029de.css" type="text/css">\n',
u'<script type="text/javascript" src="/media/CACHE/js/bf53fa5b13e2.js" charset="utf-8"></script>',
- ])
+ ], result)
settings.COMPRESS_OFFLINE_CONTEXT = self._old_offline_context
612 compressor/utils.py
@@ -1,4 +1,6 @@
+# -*- coding: utf-8 -*-
import os
+import re
import sys
from inspect import getmembers
from shlex import split as cmd_split
@@ -9,13 +11,16 @@
try:
any = any
+
except NameError:
+
def any(seq):
for item in seq:
if item:
return True
return False
+
def get_class(class_string, exception=FilterError):
"""
Convert a string version of a function name to the callable object.
@@ -32,6 +37,7 @@ def get_class(class_string, exception=FilterError):
return cls
raise exception('Failed to import %s' % class_string)
+
def get_mod_func(callback):
"""
Converts 'django.views.news.stories.story_detail' to
@@ -41,7 +47,8 @@ def get_mod_func(callback):
dot = callback.rindex('.')
except ValueError:
return callback, ''
- return callback[:dot], callback[dot+1:]
+ return callback[:dot], callback[dot + 1:]
+
def walk(root, topdown=True, onerror=None, followlinks=False):
"""
@@ -56,208 +63,471 @@ def walk(root, topdown=True, onerror=None, followlinks=False):
for link_dirpath, link_dirnames, link_filenames in walk(p):
yield (link_dirpath, link_dirnames, link_filenames)
-# Taken from Django 1.3-beta1 and before that from Python 2.7 with permission from/by the original author.
+
+# Taken from Django 1.3 and before that from Python 2.7
+# with permission from the original author.
def _resolve_name(name, package, level):
- """Return the absolute name of the module to be imported."""
- if not hasattr(package, 'rindex'):
- raise ValueError("'package' not set to a string")
- dot = len(package)
- for x in xrange(level, 1, -1):
- try:
- dot = package.rindex('.', 0, dot)
- except ValueError:
- raise ValueError("attempted relative import beyond top-level "
- "package")
- return "%s.%s" % (package[:dot], name)
+ """Return the absolute name of the module to be imported."""
+ if not hasattr(package, 'rindex'):
+ raise ValueError("'package' not set to a string")
+ dot = len(package)
+ for x in xrange(level, 1, -1):
+ try:
+ dot = package.rindex('.', 0, dot)
+ except ValueError:
+ raise ValueError("attempted relative import beyond top-level "
+ "package")
+ return "%s.%s" % (package[:dot], name)
+
def import_module(name, package=None):
- """Import a module.
-
- The 'package' argument is required when performing a relative import. It
- specifies the package to use as the anchor point from which to resolve the
- relative import to an absolute import.
-
- """
- if name.startswith('.'):
- if not package:
- raise TypeError("relative imports require the 'package' argument")
- level = 0
- for character in name:
- if character != '.':
- break
- level += 1
- name = _resolve_name(name[level:], package, level)
- __import__(name)
- return sys.modules[name]
+ """Import a module.
+
+ The 'package' argument is required when performing a relative import. It
+ specifies the package to use as the anchor point from which to resolve the
+ relative import to an absolute import.
+
+ """
+ if name.startswith('.'):
+ if not package:
+ raise TypeError("relative imports require the 'package' argument")
+ level = 0
+ for character in name:
+ if character != '.':
+ break
+ level += 1
+ name = _resolve_name(name[level:], package, level)
+ __import__(name)
+ return sys.modules[name]
class AppSettings(object):
- """
- An app setting object to be used for handling app setting defaults
- gracefully and providing a nice API for them. Say you have an app
- called ``myapp`` and want to define a few defaults, and refer to the
- defaults easily in the apps code. Add a ``settings.py`` to your app::
+ """
+ An app setting object to be used for handling app setting defaults
+ gracefully and providing a nice API for them. Say you have an app
+ called ``myapp`` and want to define a few defaults, and refer to the
+ defaults easily in the apps code. Add a ``settings.py`` to your app::
- from path.to.utils import AppSettings
+ from path.to.utils import AppSettings
- class MyAppSettings(AppSettings):
- SETTING_1 = "one"
- SETTING_2 = (
- "two",
- )
+ class MyAppSettings(AppSettings):
+ SETTING_1 = "one"
+ SETTING_2 = (
+ "two",
+ )
- Then initialize the setting with the correct prefix in the location of
- of your choice, e.g. ``conf.py`` of the app module::
+ Then initialize the setting with the correct prefix in the location of
+ of your choice, e.g. ``conf.py`` of the app module::
- settings = MyAppSettings(prefix="MYAPP")
+ settings = MyAppSettings(prefix="MYAPP")
- The ``MyAppSettings`` instance will automatically look at Django's
- global setting to determine each of the settings and respect the
- provided ``prefix``. E.g. adding this to your site's ``settings.py``
- will set the ``SETTING_1`` setting accordingly::
+ The ``MyAppSettings`` instance will automatically look at Django's
+ global setting to determine each of the settings and respect the
+ provided ``prefix``. E.g. adding this to your site's ``settings.py``
+ will set the ``SETTING_1`` setting accordingly::
- MYAPP_SETTING_1 = "uno"
+ MYAPP_SETTING_1 = "uno"
- Usage
- -----
+ Usage
+ -----
- Instead of using ``from django.conf import settings`` as you would
- usually do, you can switch to using your apps own settings module
- to access the app settings::
+ Instead of using ``from django.conf import settings`` as you would
+ usually do, you can switch to using your apps own settings module
+ to access the app settings::
- from myapp.conf import settings
+ from myapp.conf import settings
- print myapp_settings.MYAPP_SETTING_1
+ print myapp_settings.MYAPP_SETTING_1
- ``AppSettings`` instances also work as pass-throughs for other
- global settings that aren't related to the app. For example the
- following code is perfectly valid::
+ ``AppSettings`` instances also work as pass-throughs for other
+ global settings that aren't related to the app. For example the
+ following code is perfectly valid::
- from myapp.conf import settings
+ from myapp.conf import settings
- if "myapp" in settings.INSTALLED_APPS:
- print "yay, myapp is installed!"
+ if "myapp" in settings.INSTALLED_APPS:
+ print "yay, myapp is installed!"
- Custom handling
- ---------------
+ Custom handling
+ ---------------
- Each of the settings can be individually configured with callbacks.
- For example, in case a value of a setting depends on other settings
- or other dependencies. The following example sets one setting to a
- different value depending on a global setting::
+ Each of the settings can be individually configured with callbacks.
+ For example, in case a value of a setting depends on other settings
+ or other dependencies. The following example sets one setting to a
+ different value depending on a global setting::
- from django.conf import settings
+ from django.conf import settings
- class MyCustomAppSettings(AppSettings):
- ENABLED = True
+ class MyCustomAppSettings(AppSettings):
+ ENABLED = True
- def configure_enabled(self, value):
- return value and not self.DEBUG
+ def configure_enabled(self, value):
+ return value and not self.DEBUG
- custom_settings = MyCustomAppSettings("MYAPP")
+ custom_settings = MyCustomAppSettings("MYAPP")
- The value of ``custom_settings.MYAPP_ENABLED`` will vary depending on the
- value of the global ``DEBUG`` setting.
+ The value of ``custom_settings.MYAPP_ENABLED`` will vary depending on the
+ value of the global ``DEBUG`` setting.
- Each of the app settings can be customized by providing
- a method ``configure_<lower_setting_name>`` that takes the default
- value as defined in the class attributes as the only parameter.
- The method needs to return the value to be use for the setting in
- question.
- """
- def __dir__(self):
- return sorted(list(set(self.__dict__.keys() + dir(settings))))
+ Each of the app settings can be customized by providing
+ a method ``configure_<lower_setting_name>`` that takes the default
+ value as defined in the class attributes as the only parameter.
+ The method needs to return the value to be use for the setting in
+ question.
+ """
+ def __dir__(self):
+ return sorted(list(set(self.__dict__.keys() + dir(settings))))
- __members__ = lambda self: self.__dir__()
+ __members__ = lambda self: self.__dir__()
- def __getattr__(self, name):
- if name.startswith(self._prefix):
- raise AttributeError("%r object has no attribute %r" %
- (self.__class__.__name__, name))
- return getattr(settings, name)
+ def __getattr__(self, name):
+ if name.startswith(self._prefix):
+ raise AttributeError("%r object has no attribute %r" %
+ (self.__class__.__name__, name))
+ return getattr(settings, name)
- def __setattr__(self, name, value):
- super(AppSettings, self).__setattr__(name, value)
- if name in dir(settings):
- setattr(settings, name, value)
+ def __setattr__(self, name, value):
+ super(AppSettings, self).__setattr__(name, value)
+ if name in dir(settings):
+ setattr(settings, name, value)
- def __init__(self, prefix):
- super(AppSettings, self).__setattr__('_prefix', prefix)
- for name, value in filter(self.issetting, getmembers(self.__class__)):
- prefixed_name = "%s_%s" % (prefix.upper(), name.upper())
- value = getattr(settings, prefixed_name, value)
- callback = getattr(self, "configure_%s" % name.lower(), None)
- if callable(callback):
- value = callback(value)
- delattr(self.__class__, name)
- setattr(self, prefixed_name, value)
+ def __init__(self, prefix):
+ super(AppSettings, self).__setattr__('_prefix', prefix)
+ for name, value in filter(self.issetting, getmembers(self.__class__)):
+ prefixed_name = "%s_%s" % (prefix.upper(), name.upper())
+ value = getattr(settings, prefixed_name, value)
+ callback = getattr(self, "configure_%s" % name.lower(), None)
+ if callable(callback):
+ value = callback(value)
+ delattr(self.__class__, name)
+ setattr(self, prefixed_name, value)
- def issetting(self, (name, value)):
- return name == name.upper()
+ def issetting(self, (name, value)):
+ return name == name.upper()
class cached_property(object):
- """Property descriptor that caches the return value
- of the get function.
-
- *Examples*
-
- .. code-block:: python
-
- @cached_property
- def connection(self):
- return Connection()
-
- @connection.setter # Prepares stored value
- def connection(self, value):
- if value is None:
- raise TypeError("Connection must be a connection")
- return value
-
- @connection.deleter
- def connection(self, value):
- # Additional action to do at del(self.attr)
- if value is not None:
- print("Connection %r deleted" % (value, ))
- """
-
- def __init__(self, fget=None, fset=None, fdel=None, doc=None):
- self.__get = fget
- self.__set = fset
- self.__del = fdel
- self.__doc__ = doc or fget.__doc__
- self.__name__ = fget.__name__
- self.__module__ = fget.__module__
-
- def __get__(self, obj, type=None):
- if obj is None:
- return self
- try:
- return obj.__dict__[self.__name__]
- except KeyError:
- value = obj.__dict__[self.__name__] = self.__get(obj)
- return value
-
- def __set__(self, obj, value):
- if obj is None:
- return self
- if self.__set is not None:
- value = self.__set(obj, value)
- obj.__dict__[self.__name__] = value
-
- def __delete__(self, obj):
- if obj is None:
- return self
- try:
- value = obj.__dict__.pop(self.__name__)
- except KeyError:
- pass
- else:
- if self.__del is not None:
- self.__del(obj, value)
-
- def setter(self, fset):
- return self.__class__(self.__get, fset, self.__del)
-
- def deleter(self, fdel):
- return self.__class__(self.__get, self.__set, fdel)
+ """Property descriptor that caches the return value
+ of the get function.
+
+ *Examples*
+
+ .. code-block:: python
+
+ @cached_property
+ def connection(self):
+ return Connection()
+
+ @connection.setter # Prepares stored value
+ def connection(self, value):
+ if value is None:
+ raise TypeError("Connection must be a connection")
+ return value
+
+ @connection.deleter
+ def connection(self, value):
+ # Additional action to do at del(self.attr)
+ if value is not None:
+ print("Connection %r deleted" % (value, ))
+ """
+ def __init__(self, fget=None, fset=None, fdel=None, doc=None):
+ self.__get = fget
+ self.__set = fset
+ self.__del = fdel
+ self.__doc__ = doc or fget.__doc__
+ self.__name__ = fget.__name__
+ self.__module__ = fget.__module__
+
+ def __get__(self, obj, type=None):
+ if obj is None:
+ return self
+ try:
+ return obj.__dict__[self.__name__]
+ except KeyError:
+ value = obj.__dict__[self.__name__] = self.__get(obj)
+ return value
+
+ def __set__(self, obj, value):
+ if obj is None:
+ return self
+ if self.__set is not None:
+ value = self.__set(obj, value)
+ obj.__dict__[self.__name__] = value
+
+ def __delete__(self, obj):
+ if obj is None:
+ return self
+ try:
+ value = obj.__dict__.pop(self.__name__)
+ except KeyError:
+ pass
+ else:
+ if self.__del is not None:
+ self.__del(obj, value)
+
+ def setter(self, fset):
+ return self.__class__(self.__get, fset, self.__del)
+
+ def deleter(self, fdel):
+ return self.__class__(self.__get, self.__set, fdel)
+
+
+"""Advanced string formatting for Python >= 2.4.
+
+An implementation of the advanced string formatting (PEP 3101).
+
+Author: Florent Xicluna
+"""
+
+if hasattr(str, 'partition'):
+ def partition(s, sep):
+ return s.partition(sep)
+else: # Python 2.4
+ def partition(s, sep):
+ try:
+ left, right = s.split(sep, 1)
+ except ValueError:
+ return s, '', ''
+ return left, sep, right
+
+_format_str_re = re.compile(
+ r'((?<!{)(?:{{)+' # '{{'
+ r'|(?:}})+(?!})' # '}}
+ r'|{(?:[^{](?:[^{}]+|{[^{}]*})*)?})' # replacement field
+)
+_format_sub_re = re.compile(r'({[^{}]*})') # nested replacement field
+_format_spec_re = re.compile(
+ r'((?:[^{}]?[<>=^])?)' # alignment
+ r'([-+ ]?)' # sign
+ r'(#?)' r'(\d*)' r'(,?)' # base prefix, minimal width, thousands sep
+ r'((?:\.\d+)?)' # precision
+ r'(.?)$' # type
+)
+_field_part_re = re.compile(
+ r'(?:(\[)|\.|^)' # start or '.' or '['
+ r'((?(1)[^]]*|[^.[]*))' # part
+ r'(?(1)(?:\]|$)([^.[]+)?)' # ']' and invalid tail
+)
+
+if hasattr(re, '__version__'):
+ _format_str_sub = _format_str_re.sub
+else:
+ # Python 2.4 fails to preserve the Unicode type
+ def _format_str_sub(repl, s):
+ if isinstance(s, unicode):
+ return unicode(_format_str_re.sub(repl, s))
+ return _format_str_re.sub(repl, s)
+
+if hasattr(int, '__index__'):
+ def _is_integer(value):
+ return hasattr(value, '__index__')
+else: # Python 2.4
+ def _is_integer(value):
+ return isinstance(value, (int, long))
+
+
+def _strformat(value, format_spec=""):
+ """Internal string formatter.
+
+ It implements the Format Specification Mini-Language.
+ """
+ m = _format_spec_re.match(str(format_spec))
+ if not m:
+ raise ValueError('Invalid conversion specification')
+ align, sign, prefix, width, comma, precision, conversion = m.groups()
+ is_numeric = hasattr(value, '__float__')
+ is_integer = is_numeric and _is_integer(value)
+ if prefix and not is_integer:
+ raise ValueError('Alternate form (#) not allowed in %s format '
+ 'specifier' % (is_numeric and 'float' or 'string'))
+ if is_numeric and conversion == 'n':
+ # Default to 'd' for ints and 'g' for floats
+ conversion = is_integer and 'd' or 'g'
+ elif sign:
+ if not is_numeric:
+ raise ValueError("Sign not allowed in string format specifier")
+ if conversion == 'c':
+ raise ValueError("Sign not allowed with integer "
+ "format specifier 'c'")
+ if comma:
+ # TODO: thousand separator
+ pass
+ try:
+ if ((is_numeric and conversion == 's') or
+ (not is_integer and conversion in set('cdoxX'))):
+ raise ValueError
+ if conversion == 'c':
+ conversion = 's'
+ value = chr(value % 256)
+ rv = ('%' + prefix + precision + (conversion or 's')) % (value,)
+ except ValueError:
+ raise ValueError("Unknown format code %r for object of type %r" %
+ (conversion, value.__class__.__name__))
+ if sign not in '-' and value >= 0:
+ # sign in (' ', '+')
+ rv = sign + rv
+ if width:
+ zero = (width[0] == '0')
+ width = int(width)
+ else:
+ zero = False
+ width = 0
+ # Fastpath when alignment is not required
+ if width <= len(rv):
+ if not is_numeric and (align == '=' or (zero and not align)):
+ raise ValueError("'=' alignment not allowed in string format "
+ "specifier")
+ return rv
+ fill, align = align[:-1], align[-1:]
+ if not fill:
+ fill = zero and '0' or ' '
+ if align == '^':
+ padding = width - len(rv)
+ # tweak the formatting if the padding is odd
+ if padding % 2:
+ rv += fill
+ rv = rv.center(width, fill)
+ elif align == '=' or (zero and not align):
+ if not is_numeric:
+ raise ValueError("'=' alignment not allowed in string format "
+ "specifier")
+ if value < 0 or sign not in '-':
+ rv = rv[0] + rv[1:].rjust(width - 1, fill)
+ else:
+ rv = rv.rjust(width, fill)
+ elif align in ('>', '=') or (is_numeric and not align):
+ # numeric value right aligned by default
+ rv = rv.rjust(width, fill)
+ else:
+ rv = rv.ljust(width, fill)
+ return rv
+
+
+def _format_field(value, parts, conv, spec, want_bytes=False):
+ """Format a replacement field."""
+ for k, part, _ in parts:
+ if k:
+ if part.isdigit():
+ value = value[int(part)]
+ else:
+ value = value[part]
+ else:
+ value = getattr(value, part)
+ if conv:
+ value = ((conv == 'r') and '%r' or '%s') % (value,)
+ if hasattr(value, '__format__'):
+ value = value.__format__(spec)
+ elif hasattr(value, 'strftime') and spec:
+ value = value.strftime(str(spec))
+ else:
+ value = _strformat(value, spec)
+ if want_bytes and isinstance(value, unicode):
+ return str(value)
+ return value
+
+
+class FormattableString(object):
+ """Class which implements method format().
+
+ The method format() behaves like str.format() in python 2.6+.
+
+ >>> FormattableString(u'{a:5}').format(a=42)
+ ... # Same as u'{a:5}'.format(a=42)
+ u' 42'
+
+ """
+
+ __slots__ = '_index', '_kwords', '_nested', '_string', 'format_string'
+
+ def __init__(self, format_string):
+ self._index = 0
+ self._kwords = {}
+ self._nested = {}
+
+ self.format_string = format_string
+ self._string = _format_str_sub(self._prepare, format_string)
+
+ def __eq__(self, other):
+ if isinstance(other, FormattableString):
+ return self.format_string == other.format_string
+ # Compare equal with the original string.
+ return self.format_string == other
+
+ def _prepare(self, match):
+ # Called for each replacement field.
+ part = match.group(0)
+ if part[0] == part[-1]:
+ # '{{' or '}}'
+ assert part == part[0] * len(part)
+ return part[:len(part) //