
Merge branch 'release/0.6a11'

2 parents 2dc6734 + 4beb723 · commit 4996394062e556603aa895d0f56d1d2589feb513 · @jezdez committed Feb 10, 2011
@@ -18,3 +18,4 @@ Gert Van Gool
Justin Lilly
Maciek Szczesniak
Mehmet S. Catalbas
+Ulrich Petri
@@ -1,4 +1,4 @@
-VERSION = (0, 6, 0, "a", 10) # following PEP 386
+VERSION = (0, 6, 0, "a", 11) # following PEP 386
DEV_N = None
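
For context, a version tuple in this shape is usually rendered to a PEP 386-style string by a small helper. The project's actual helper is not part of this diff, so the following is only an illustrative sketch:

    VERSION = (0, 6, 0, "a", 11)  # mirrors the tuple above

    def get_version(version=VERSION):
        # Sketch only: turn (0, 6, 0, "a", 11) into "0.6a11".
        major, minor, micro, stage, num = version
        main = "%s.%s" % (major, minor)
        if micro:
            main += ".%s" % micro
        if stage:
            main += "%s%s" % (stage, num)
        return main

    print(get_version())  # "0.6a11"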
@@ -1,13 +1,13 @@
import os
-from django.conf import settings as django_settings
from django.template.loader import render_to_string
from django.core.files.base import ContentFile
-from compressor.conf import settings
from compressor import filters
+from compressor.cache import get_hexdigest, get_mtime
+from compressor.conf import settings
from compressor.exceptions import UncompressableFileError
-from compressor.utils import get_hexdigest, get_mtime, get_class
+from compressor.utils import get_class
class Compressor(object):
@@ -25,20 +25,20 @@ def get_filename(self, url):
try:
base_url = self.storage.base_url
except AttributeError:
- base_url = settings.MEDIA_URL
+ base_url = settings.COMPRESS_URL
if not url.startswith(base_url):
raise UncompressableFileError('"%s" is not in COMPRESS_URL ("%s") and can not be compressed' % (url, base_url))
basename = url.replace(base_url, "", 1)
- filename = os.path.join(settings.MEDIA_ROOT, basename)
+ filename = os.path.join(settings.COMPRESS_ROOT, basename)
if not os.path.exists(filename):
raise UncompressableFileError('"%s" does not exist' % filename)
return filename
def _get_parser(self):
if self._parser:
return self._parser
- parser_cls = get_class(settings.PARSER)
+ parser_cls = get_class(settings.COMPRESS_PARSER)
self._parser = parser_cls(self.content)
return self._parser
@@ -54,7 +54,7 @@ def mtimes(self):
def cachekey(self):
cachebits = [self.content]
cachebits.extend([str(m) for m in self.mtimes])
- cachestr = "".join(cachebits).encode(django_settings.DEFAULT_CHARSET)
+ cachestr = "".join(cachebits).encode(settings.DEFAULT_CHARSET)
return "django_compressor.%s" % get_hexdigest(cachestr)[:12]
@property
@@ -82,7 +82,7 @@ def hunks(self):
input = fd.read()
if self.filters:
input = self.filter(input, 'input', filename=v, elem=elem)
- charset = attribs.get('charset', django_settings.DEFAULT_CHARSET)
+ charset = attribs.get('charset', settings.DEFAULT_CHARSET)
self._hunks.append(unicode(input, charset))
fd.close()
return self._hunks
@@ -91,11 +91,11 @@ def concat(self):
# Design decision needed: either everything should be unicode up to
# here or we encode strings as soon as we acquire them. Currently
# concat() expects all hunks to be unicode and does the encoding
- return "\n".join([hunk.encode(django_settings.DEFAULT_CHARSET) for hunk in self.hunks])
+ return "\n".join([hunk.encode(settings.DEFAULT_CHARSET) for hunk in self.hunks])
def filter(self, content, method, **kwargs):
for f in self.filters:
- filter = getattr(filters.get_class(f)(content, filter_type=self.type), method)
+ filter = getattr(get_class(f)(content, filter_type=self.type), method)
try:
if callable(filter):
content = filter(**kwargs)
@@ -121,7 +121,7 @@ def hash(self):
def new_filepath(self):
filename = "".join([self.hash, self.extension])
return os.path.join(
- settings.OUTPUT_DIR.strip(os.sep), self.output_prefix, filename)
+ settings.COMPRESS_OUTPUT_DIR.strip(os.sep), self.output_prefix, filename)
def save_file(self):
if self.storage.exists(self.new_filepath):
@@ -130,15 +130,15 @@ def save_file(self):
return True
def output(self):
- if not settings.COMPRESS:
+ if not settings.COMPRESS_ENABLED:
return self.content
self.save_file()
context = getattr(self, 'extra_context', {})
context['url'] = self.storage.url(self.new_filepath)
return render_to_string(self.template_name, context)
def output_inline(self):
- context = {'content': settings.COMPRESS and self.combined or self.concat()}
+ context = {'content': settings.COMPRESS_ENABLED and self.combined or self.concat()}
if hasattr(self, 'extra_context'):
context.update(self.extra_context)
return render_to_string(self.template_name_inline, context)
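
The hunks above replace the MEDIA_URL/MEDIA_ROOT fallbacks with the prefixed COMPRESS_URL and COMPRESS_ROOT settings. A standalone sketch of the URL-to-path mapping that get_filename() now performs, with made-up values standing in for the settings:

    import os

    # Hypothetical values standing in for settings.COMPRESS_URL / settings.COMPRESS_ROOT.
    COMPRESS_URL = "/static/"
    COMPRESS_ROOT = "/srv/site/static"

    def get_filename(url):
        # Mirrors Compressor.get_filename(): strip the base URL, join onto the root.
        if not url.startswith(COMPRESS_URL):
            raise ValueError('"%s" is not in COMPRESS_URL ("%s")' % (url, COMPRESS_URL))
        basename = url.replace(COMPRESS_URL, "", 1)
        filename = os.path.join(COMPRESS_ROOT, basename)
        if not os.path.exists(filename):
            raise ValueError('"%s" does not exist' % filename)
        return filename

    # get_filename("/static/css/site.css") -> "/srv/site/static/css/site.css", if that file exists.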
@@ -1,5 +1,34 @@
+import os
+
from django.core.cache import get_cache
+from django.utils.encoding import smart_str
+from django.utils.hashcompat import sha_constructor
from compressor.conf import settings
-cache = get_cache(settings.CACHE_BACKEND)
+def get_hexdigest(plaintext):
+ return sha_constructor(plaintext).hexdigest()
+
+def get_mtime_cachekey(filename):
+ return "django_compressor.mtime.%s" % get_hexdigest(filename)
+
+def get_offline_cachekey(source):
+ return ("django_compressor.offline.%s" %
+ get_hexdigest("".join(smart_str(s) for s in source)))
+
+def get_mtime(filename):
+ if settings.COMPRESS_MTIME_DELAY:
+ key = get_mtime_cachekey(filename)
+ mtime = cache.get(key)
+ if mtime is None:
+ mtime = os.path.getmtime(filename)
+ cache.set(key, mtime, settings.COMPRESS_MTIME_DELAY)
+ return mtime
+ return os.path.getmtime(filename)
+
+def get_hashed_mtime(filename, length=12):
+ filename = os.path.realpath(filename)
+ mtime = str(int(get_mtime(filename)))
+ return get_hexdigest(mtime)[:length]
+
+cache = get_cache(settings.COMPRESS_CACHE_BACKEND)
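
The new compressor.cache module gathers the hashing and mtime helpers that base.py and the filters now import. A brief usage sketch, assuming a configured Django cache backend and an existing file (the path below is made up):

    from compressor.cache import get_hexdigest, get_mtime, get_hashed_mtime

    css_path = "/srv/site/static/css/site.css"  # hypothetical file path

    digest = get_hexdigest("body { color: red; }")  # SHA-1 hex digest of the string
    mtime = get_mtime(css_path)          # os.path.getmtime(), cached when COMPRESS_MTIME_DELAY is set
    suffix = get_hashed_mtime(css_path)  # 12-character digest of int(mtime)
    busted_url = "/static/css/site.css?%s" % suffix  # cache-busting query string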
@@ -0,0 +1,3 @@
+from compressor.settings import CompressorSettings
+
+settings = CompressorSettings(prefix="COMPRESS")
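
The old hand-rolled settings module (deleted in the hunk below) gives way to a CompressorSettings object built around a prefix. The CompressorSettings class itself lives in compressor.settings and is not shown in this diff; the following is only a rough sketch of how such a prefixed wrapper could behave, with assumed defaults:

    from django.conf import settings as django_settings

    class PrefixedSettings(object):
        """Illustrative stand-in for CompressorSettings, not the real implementation:
        resolve attributes against the Django settings first, then fall back to defaults."""

        defaults = {
            "COMPRESS_OUTPUT_DIR": "cache",  # assumed default
            "COMPRESS_PARSER": "compressor.parser.BeautifulSoupParser",  # assumed default
        }

        def __init__(self, prefix="COMPRESS"):
            self.prefix = prefix

        def __getattr__(self, name):
            if hasattr(django_settings, name):
                return getattr(django_settings, name)
            if name in self.defaults:
                return self.defaults[name]
            raise AttributeError(name)

    settings = PrefixedSettings(prefix="COMPRESS")
    # settings.COMPRESS_PARSER  -> project override if defined, otherwise the assumed default
    # settings.DEFAULT_CHARSET  -> falls through to Django's own setting, as base.py now expects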
@@ -1,79 +0,0 @@
-from django.core.exceptions import ImproperlyConfigured
-from django.conf import settings
-
-MEDIA_URL = getattr(settings, 'COMPRESS_URL', settings.MEDIA_URL)
-if not MEDIA_URL.endswith('/'):
- raise ImproperlyConfigured(
- 'The MEDIA_URL and COMPRESS_URL settings must have a trailing slash.')
-
-MEDIA_ROOT = getattr(settings, 'COMPRESS_ROOT', settings.MEDIA_ROOT)
-OUTPUT_DIR = getattr(settings, 'COMPRESS_OUTPUT_DIR', 'cache')
-STORAGE = getattr(settings, 'COMPRESS_STORAGE', 'compressor.storage.CompressorFileStorage')
-
-COMPRESS = getattr(settings, 'COMPRESS', not settings.DEBUG)
-COMPRESS_CSS_FILTERS = getattr(settings, 'COMPRESS_CSS_FILTERS', ['compressor.filters.css_default.CssAbsoluteFilter'])
-COMPRESS_JS_FILTERS = getattr(settings, 'COMPRESS_JS_FILTERS', ['compressor.filters.jsmin.JSMinFilter'])
-
-COMPRESS_LESSC_BINARY = LESSC_BINARY = getattr(settings, 'COMPRESS_LESSC_BINARY', 'lessc')
-
-CLOSURE_COMPILER_BINARY = getattr(settings, 'COMPRESS_CLOSURE_COMPILER_BINARY', 'java -jar compiler.jar')
-CLOSURE_COMPILER_ARGUMENTS = getattr(settings, 'COMPRESS_CLOSURE_COMPILER_ARGUMENTS', '')
-
-CSSTIDY_BINARY = getattr(settings, 'CSSTIDY_BINARY',
- getattr(settings, 'COMPRESS_CSSTIDY_BINARY', 'csstidy'))
-CSSTIDY_ARGUMENTS = getattr(settings, 'CSSTIDY_ARGUMENTS',
- getattr(settings, 'COMPRESS_CSSTIDY_ARGUMENTS', '--template=highest'))
-
-YUI_BINARY = getattr(settings, 'COMPRESS_YUI_BINARY', 'java -jar yuicompressor.jar')
-YUI_CSS_ARGUMENTS = getattr(settings, 'COMPRESS_YUI_CSS_ARGUMENTS', '')
-YUI_JS_ARGUMENTS = getattr(settings, 'COMPRESS_YUI_JS_ARGUMENTS', '')
-
-if COMPRESS_CSS_FILTERS is None:
- COMPRESS_CSS_FILTERS = []
-
-if COMPRESS_JS_FILTERS is None:
- COMPRESS_JS_FILTERS = []
-
-DATA_URI_MIN_SIZE = getattr(settings, 'COMPRESS_DATA_URI_MIN_SIZE', 1024)
-
-# rebuilds the cache every 30 days if nothing has changed.
-REBUILD_TIMEOUT = getattr(settings, 'COMPRESS_REBUILD_TIMEOUT', 60 * 60 * 24 * 30) # 30 days
-
-# the upper bound on how long any compression should take to be generated
-# (used against dog piling, should be a lot smaller than REBUILD_TIMEOUT
-MINT_DELAY = getattr(settings, 'COMPRESS_MINT_DELAY', 30) # 30 seconds
-
-# check for file changes only after a delay (in seconds, disabled by default)
-MTIME_DELAY = getattr(settings, 'COMPRESS_MTIME_DELAY', None)
-
-# the backend to use when parsing the JavaScript or Stylesheet files
-PARSER = getattr(settings, 'COMPRESS_PARSER', 'compressor.parser.BeautifulSoupParser')
-
-# Allows changing verbosity from the settings.
-VERBOSE = getattr(settings, "COMPRESS_VERBOSE", False)
-
-# the cache backend to use
-CACHE_BACKEND = getattr(settings, 'COMPRESS_CACHE_BACKEND', None)
-if CACHE_BACKEND is None:
- # If we are on Django 1.3 AND using the new CACHES setting...
- if getattr(settings, "CACHES", None):
- CACHE_BACKEND = "default"
- else:
- # fallback for people still using the old CACHE_BACKEND setting
- CACHE_BACKEND = settings.CACHE_BACKEND
-
-# enables the offline cache -- a cache that is filled by the compress management command
-OFFLINE = getattr(settings, 'COMPRESS_OFFLINE', False)
-
-# invalidates the offline cache after one year
-OFFLINE_TIMEOUT = getattr(settings, 'COMPRESS_OFFLINE_TIMEOUT', 60 * 60 * 24 * 365) # 1 year
-
-# The context to be used when compressing the files "offline"
-OFFLINE_CONTEXT = getattr(settings, 'COMPRESS_OFFLINE_CONTEXT', {})
-if not OFFLINE_CONTEXT:
- OFFLINE_CONTEXT = {
- 'MEDIA_URL': settings.MEDIA_URL,
- }
- # Adds the 1.3 STATIC_URL setting to the context if available
- if getattr(settings, 'STATIC_URL', None):
- OFFLINE_CONTEXT['STATIC_URL'] = settings.STATIC_URL
@@ -1,5 +1,3 @@
-from django.conf import settings as django_settings
-
from compressor.conf import settings
from compressor.base import Compressor
from compressor.exceptions import UncompressableFileError
@@ -27,7 +25,7 @@ def split_contents(self):
content = self.parser.elem_content(elem)
data = ('file', self.get_filename(elem_attribs['href']), elem)
except UncompressableFileError:
- if django_settings.DEBUG:
+ if settings.DEBUG:
raise
elif elem_name == 'style':
data = ('hunk', self.parser.elem_content(elem), elem)
@@ -48,7 +46,7 @@ def output(self):
self.split_contents()
if not hasattr(self, 'media_nodes'):
return super(CssCompressor, self).output()
- if not settings.COMPRESS:
+ if not settings.COMPRESS_ENABLED:
return self.content
ret = []
for media, subnode in self.media_nodes:
@@ -1,12 +1,11 @@
-from compressor.exceptions import FilterError
-from compressor.utils import get_class, get_mod_func
from compressor.conf import settings
+from compressor.exceptions import FilterError
class FilterBase(object):
def __init__(self, content, filter_type=None, verbose=0):
self.type = filter_type
self.content = content
- self.verbose = verbose or settings.VERBOSE
+ self.verbose = verbose or settings.COMPRESS_VERBOSE
def input(self, **kwargs):
raise NotImplementedError
@@ -8,9 +8,9 @@
class ClosureCompilerFilter(FilterBase):
def output(self, **kwargs):
- arguments = settings.CLOSURE_COMPILER_ARGUMENTS
+ arguments = settings.COMPRESS_CLOSURE_COMPILER_ARGUMENTS
- command = '%s %s' % (settings.CLOSURE_COMPILER_BINARY, arguments)
+ command = '%s %s' % (settings.COMPRESS_CLOSURE_COMPILER_BINARY, arguments)
try:
p = Popen(cmd_split(command), stdout=PIPE, stdin=PIPE, stderr=PIPE)
@@ -2,47 +2,63 @@
import re
import posixpath
-from compressor.filters import FilterBase, FilterError
+from compressor.cache import get_hashed_mtime
from compressor.conf import settings
-from compressor.utils import get_hexdigest, get_mtime
+from compressor.filters import FilterBase
URL_PATTERN = re.compile(r'url\(([^\)]+)\)')
class CssAbsoluteFilter(FilterBase):
def input(self, filename=None, **kwargs):
- media_root = os.path.normcase(os.path.abspath(settings.MEDIA_ROOT))
+ self.root = os.path.normcase(os.path.abspath(settings.COMPRESS_ROOT))
if filename is not None:
filename = os.path.normcase(os.path.abspath(filename))
- if not filename or not filename.startswith(media_root):
+ if not filename or not filename.startswith(self.root):
return self.content
- self.media_path = filename[len(media_root):].replace(os.sep, '/')
- self.media_path = self.media_path.lstrip('/')
- self.media_url = settings.MEDIA_URL.rstrip('/')
+ self.path = filename[len(self.root):].replace(os.sep, '/')
+ self.path = self.path.lstrip('/')
+ self.url = settings.COMPRESS_URL.rstrip('/')
+ self.url_path = self.url
try:
- mtime = get_mtime(filename)
- self.mtime = get_hexdigest(str(int(mtime)))[:12]
+ self.mtime = get_hashed_mtime(filename)
except OSError:
self.mtime = None
self.has_http = False
- if self.media_url.startswith('http://') or self.media_url.startswith('https://'):
+ if self.url.startswith('http://') or self.url.startswith('https://'):
self.has_http = True
- parts = self.media_url.split('/')
- self.media_url = '/'.join(parts[2:])
+ parts = self.url.split('/')
+ self.url = '/'.join(parts[2:])
+ self.url_path = '/%s' % '/'.join(parts[3:])
self.protocol = '%s/' % '/'.join(parts[:2])
- self.directory_name = '/'.join([self.media_url, os.path.dirname(self.media_path)])
+ self.host = parts[2]
+ self.directory_name = '/'.join([self.url, os.path.dirname(self.path)])
output = URL_PATTERN.sub(self.url_converter, self.content)
return output
+ def guess_filename(self, url):
+ local_path = url
+ if self.has_http:
+ # COMPRESS_URL had a protocol, remove it and the hostname from our path.
+ local_path = local_path.replace(self.protocol + self.host, "", 1)
+ # Now, we just need to check if we can find the path from COMPRESS_URL in our url
+ if local_path.startswith(self.url_path):
+ local_path = local_path.replace(self.url_path, "", 1)
+ # Re-build the local full path by adding root
+ filename = os.path.join(self.root, local_path.lstrip(os.sep))
+ return os.path.exists(filename) and filename
+
def add_mtime(self, url):
- if self.mtime is None:
+ filename = self.guess_filename(url)
+ mtime = filename and get_hashed_mtime(filename) or self.mtime
+ if mtime is None:
return url
if (url.startswith('http://') or
url.startswith('https://') or
url.startswith('/')):
if "?" in url:
- return "%s&%s" % (url, self.mtime)
- return "%s?%s" % (url, self.mtime)
+ return "%s&%s" % (url, mtime)
+ return "%s?%s" % (url, mtime)
return url
def url_converter(self, matchobj):
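
With guess_filename() in place, add_mtime() now prefers the hashed mtime of the referenced file itself and only falls back to the stylesheet's own hash. A minimal sketch of the resulting query-string cache busting (inputs are made up):

    def append_mtime(url, hashed_mtime):
        # Mirrors the add_mtime() behaviour above: only absolute or rooted URLs get a suffix.
        if hashed_mtime is None:
            return url
        if url.startswith("http://") or url.startswith("https://") or url.startswith("/"):
            separator = "&" if "?" in url else "?"
            return "%s%s%s" % (url, separator, hashed_mtime)
        return url

    # append_mtime("/static/img/logo.png", "0123456789ab")      -> "/static/img/logo.png?0123456789ab"
    # append_mtime("/static/img/logo.png?v=2", "0123456789ab")  -> "/static/img/logo.png?v=2&0123456789ab"
    # append_mtime("data:image/png;base64,...", "0123456789ab") -> unchanged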
@@ -1,4 +1,4 @@
-from compressor.filters import FilterBase, FilterError
+from compressor.filters import FilterBase
from compressor.filters.cssmin.cssmin import cssmin
class CSSMinFilter(FilterBase):
@@ -16,7 +16,7 @@ def output(self, **kwargs):
output_file = tempfile.NamedTemporaryFile(mode='w+b')
- command = '%s %s %s %s' % (settings.CSSTIDY_BINARY, tmp_file.name, settings.CSSTIDY_ARGUMENTS, output_file.name)
+ command = '%s %s %s %s' % (settings.COMPRESS_CSSTIDY_BINARY, tmp_file.name, settings.COMPRESS_CSSTIDY_ARGUMENTS, output_file.name)
command_output = Popen(command, shell=True,
stdout=PIPE, stdin=PIPE, stderr=PIPE).communicate()
