Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with the HTTPS clone URL or the Subversion checkout URL.
Download ZIP
Browse files

[1.3.X] Fixed #17737 -- Stopped the collectstatic management command from copying the wrong file in repeated runs. Thanks, pigletto.

Backport from trunk (r17612).

git-svn-id: http://code.djangoproject.com/svn/django/branches/releases/1.3.X@17613 bcc190cf-cafb-0310-a4f2-bffc1f526a37
  • Loading branch information...
commit 523d6167d6075eb3b9cb3aef71feb940ea0be5a3 1 parent dad3e55
@jezdez jezdez authored
View
10 django/contrib/staticfiles/management/commands/collectstatic.py
@@ -76,6 +76,7 @@ def handle_noargs(self, **options):
if confirm != 'yes':
raise CommandError("Collecting static files cancelled.")
+ processed_files = []
for finder in finders.get_finders():
for path, storage in finder.list(ignore_patterns):
# Prefix the relative path if the source storage contains it
@@ -83,10 +84,13 @@ def handle_noargs(self, **options):
prefixed_path = os.path.join(storage.prefix, path)
else:
prefixed_path = path
+ if prefixed_path in processed_files:
+ continue
if symlink:
self.link_file(path, prefixed_path, storage, **options)
else:
self.copy_file(path, prefixed_path, storage, **options)
+ processed_files.append(prefixed_path)
actual_count = len(self.copied_files) + len(self.symlinked_files)
unmodified_count = len(self.unmodified_files)
@@ -196,9 +200,7 @@ def copy_file(self, path, prefixed_path, source_storage, **options):
os.makedirs(os.path.dirname(full_path))
except OSError:
pass
- shutil.copy2(source_path, full_path)
- else:
- source_file = source_storage.open(path)
- self.storage.save(prefixed_path, source_file)
+ source_file = source_storage.open(path)
+ self.storage.save(prefixed_path, source_file)
if not prefixed_path in self.copied_files:
self.copied_files.append(prefixed_path)
View
52 tests/regressiontests/staticfiles_tests/tests.py
@@ -245,6 +245,58 @@ def test_no_common_ignore_patterns(self):
self.assertFileContains('test/CVS', 'should be ignored')
+class TestCollectionFilesOverride(BuildStaticTestCase):
+ """
+ Test overriding duplicated files by ``collectstatic`` management command.
+ Check for proper handling of apps order in INSTALLED_APPS even if file
+ modification dates are in different order:
+
+ 'regressiontests.staticfiles_tests.apps.test',
+ 'regressiontests.staticfiles_tests.apps.no_label',
+ """
+ def setUp(self):
+ self.orig_path = os.path.join(TEST_ROOT, 'apps', 'no_label', 'static', 'file2.txt')
+ # get modification and access times for no_label/static/file2.txt
+ self.orig_mtime = os.path.getmtime(self.orig_path)
+ self.orig_atime = os.path.getatime(self.orig_path)
+
+ # prepare duplicate of file2.txt from no_label app
+ # this file will have modification time older than no_label/static/file2.txt
+ # anyway it should be taken to STATIC_ROOT because 'test' app is before
+ # 'no_label' app in INSTALLED_APPS
+ self.testfile_path = os.path.join(TEST_ROOT, 'apps', 'test', 'static', 'file2.txt')
+ f = open(self.testfile_path, 'w+')
+ f.write('duplicate of file2.txt')
+ f.close()
+ os.utime(self.testfile_path, (self.orig_atime - 1, self.orig_mtime - 1))
+ super(TestCollectionFilesOverride, self).setUp()
+
+ def tearDown(self):
+ if os.path.exists(self.testfile_path):
+ os.unlink(self.testfile_path)
+ # set back original modification time
+ os.utime(self.orig_path, (self.orig_atime, self.orig_mtime))
+
+ def test_override(self):
+ self.assertFileContains('file2.txt', 'duplicate of file2.txt')
+
+ # run collectstatic again
+ self.run_collectstatic()
+
+ self.assertFileContains('file2.txt', 'duplicate of file2.txt')
+
+ # and now change modification time of no_label/static/file2.txt
+ # test app is first in INSTALLED_APPS so file2.txt should remain unmodified
+ mtime = os.path.getmtime(self.testfile_path)
+ atime = os.path.getatime(self.testfile_path)
+ os.utime(self.orig_path, (mtime + 1, atime + 1))
+
+ # run collectstatic again
+ self.run_collectstatic()
+
+ self.assertFileContains('file2.txt', 'duplicate of file2.txt')
+
+
class TestNoFilesCreated(object):
def test_no_files_created(self):
Please sign in to comment.
Something went wrong with that request. Please try again.