Minor bugfixes to the staticfiles app, following upstream development in django-staticfiles:

- Create the files to ignore during the tests (.hidden and backup~) dynamically.
- Refactored the post_process() method of the CachedFilesMixin storage mixin to be less time-consuming.
- Refactored handling of fragments in the post_process() method.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@17519 bcc190cf-cafb-0310-a4f2-bffc1f526a37
parent 2df1847c9b
commit 4f1ac8f5f1
5 changed files with 189 additions and 87 deletions

django/contrib/staticfiles/management/commands/collectstatic.py

@@ -7,13 +7,14 @@ from optparse import make_option
 from django.core.files.storage import FileSystemStorage
 from django.core.management.base import CommandError, NoArgsCommand
 from django.utils.encoding import smart_str, smart_unicode
+from django.utils.datastructures import SortedDict
 
 from django.contrib.staticfiles import finders, storage
 
 
 class Command(NoArgsCommand):
     """
-    Command that allows to copy or symlink media files from different
+    Command that allows to copy or symlink static files from different
     locations to the settings.STATIC_ROOT.
     """
     option_list = NoArgsCommand.option_list + (
@@ -50,6 +51,7 @@ class Command(NoArgsCommand):
         self.copied_files = []
         self.symlinked_files = []
         self.unmodified_files = []
+        self.post_processed_files = []
         self.storage = storage.staticfiles_storage
         try:
             self.storage.path('')
@@ -61,18 +63,27 @@ class Command(NoArgsCommand):
         if hasattr(os, 'stat_float_times'):
             os.stat_float_times(False)
 
-    def handle_noargs(self, **options):
+    def set_options(self, **options):
+        """
+        Set instance variables based on an options dict
+        """
+        self.interactive = options['interactive']
+        self.verbosity = int(options.get('verbosity', 1))
+        self.symlink = options['link']
         self.clear = options['clear']
         self.dry_run = options['dry_run']
         ignore_patterns = options['ignore_patterns']
         if options['use_default_ignore_patterns']:
             ignore_patterns += ['CVS', '.*', '*~']
         self.ignore_patterns = list(set(ignore_patterns))
-        self.interactive = options['interactive']
-        self.symlink = options['link']
-        self.verbosity = int(options.get('verbosity', 1))
+        self.post_process = options['post_process']
 
+    def collect(self):
+        """
+        Perform the bulk of the work of collectstatic.
+
+        Split off from handle_noargs() to facilitate testing.
+        """
         if self.symlink:
             if sys.platform == 'win32':
                 raise CommandError("Symlinking is not supported by this "
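Because the collection logic now lives in collect(), a test can drive a full run directly and inspect the stats dict it returns (see the next hunk), without going through handle_noargs() and its confirmation prompt. A minimal sketch of that usage, assuming a configured test settings environment; the option values below are assumptions, and real test suites would more likely go through call_command('collectstatic', ...):

from django.contrib.staticfiles.management.commands import collectstatic

cmd = collectstatic.Command()
# option values are assumed for illustration, not mandated by this commit
cmd.set_options(interactive=False, verbosity=0, link=False, clear=False,
                dry_run=False, ignore_patterns=[],
                use_default_ignore_patterns=True, post_process=True)
stats = cmd.collect()
# collect() reports its work instead of printing it
print stats['modified'], stats['unmodified'], stats['post_processed']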
@@ -80,6 +91,46 @@ class Command(NoArgsCommand):
         if not self.local:
             raise CommandError("Can't symlink to a remote destination.")
 
+        if self.clear:
+            self.clear_dir('')
+
+        if self.symlink:
+            handler = self.link_file
+        else:
+            handler = self.copy_file
+
+        found_files = SortedDict()
+        for finder in finders.get_finders():
+            for path, storage in finder.list(self.ignore_patterns):
+                # Prefix the relative path if the source storage contains it
+                if getattr(storage, 'prefix', None):
+                    prefixed_path = os.path.join(storage.prefix, path)
+                else:
+                    prefixed_path = path
+                found_files[prefixed_path] = storage.open(path)
+                handler(path, prefixed_path, storage)
+
+        # Here we check if the storage backend has a post_process
+        # method and pass it the list of modified files.
+        if self.post_process and hasattr(self.storage, 'post_process'):
+            processor = self.storage.post_process(found_files,
+                                                  dry_run=self.dry_run)
+            for original_path, processed_path, processed in processor:
+                if processed:
+                    self.log(u"Post-processed '%s' as '%s'" %
+                             (original_path, processed_path), level=1)
+                    self.post_processed_files.append(original_path)
+                else:
+                    self.log(u"Skipped post-processing '%s'" % original_path)
+
+        return {
+            'modified': self.copied_files + self.symlinked_files,
+            'unmodified': self.unmodified_files,
+            'post_processed': self.post_processed_files,
+        }
+
+    def handle_noargs(self, **options):
+        self.set_options(**options)
         # Warn before doing anything more.
         if (isinstance(self.storage, FileSystemStorage) and
                 self.storage.location):
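The consuming loop in this hunk pins down the new contract for storage backends: post_process() now yields (original_path, processed_path, processed) tuples instead of returning a flat list of processed paths. A hypothetical minimal backend conforming to that contract — MyPostProcessingStorage and its no-op body are illustrative, not part of this commit:

from django.core.files.storage import FileSystemStorage

class MyPostProcessingStorage(FileSystemStorage):
    def post_process(self, paths, dry_run=False, **options):
        # honour dry runs the same way CachedFilesMixin now does
        if dry_run:
            return
        for name in paths:
            # a real backend would rewrite or rename the file here and
            # report back (original path, resulting path, whether it
            # actually did anything)
            yield name, name, False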
@@ -107,49 +158,25 @@ Type 'yes' to continue, or 'no' to cancel: """
         if confirm != 'yes':
             raise CommandError("Collecting static files cancelled.")
 
-        if self.clear:
-            self.clear_dir('')
-
-        handler = {
-            True: self.link_file,
-            False: self.copy_file,
-        }[self.symlink]
-
-        found_files = []
-        for finder in finders.get_finders():
-            for path, storage in finder.list(self.ignore_patterns):
-                # Prefix the relative path if the source storage contains it
-                if getattr(storage, 'prefix', None):
-                    prefixed_path = os.path.join(storage.prefix, path)
-                else:
-                    prefixed_path = path
-                found_files.append(prefixed_path)
-                handler(path, prefixed_path, storage)
-
-        # Here we check if the storage backend has a post_process
-        # method and pass it the list of modified files.
-        if self.post_process and hasattr(self.storage, 'post_process'):
-            post_processed = self.storage.post_process(found_files, **options)
-            for path in post_processed:
-                self.log(u"Post-processed '%s'" % path, level=1)
-        else:
-            post_processed = []
-
-        modified_files = self.copied_files + self.symlinked_files
-        actual_count = len(modified_files)
-        unmodified_count = len(self.unmodified_files)
+        collected = self.collect()
+        modified_count = len(collected['modified'])
+        unmodified_count = len(collected['unmodified'])
+        post_processed_count = len(collected['post_processed'])
 
         if self.verbosity >= 1:
-            template = ("\n%(actual_count)s %(identifier)s %(action)s"
-                        "%(destination)s%(unmodified)s.\n")
+            template = ("\n%(modified_count)s %(identifier)s %(action)s"
+                        "%(destination)s%(unmodified)s%(post_processed)s.\n")
             summary = template % {
-                'actual_count': actual_count,
-                'identifier': 'static file' + (actual_count > 1 and 's' or ''),
+                'modified_count': modified_count,
+                'identifier': 'static file' + (modified_count != 1 and 's' or ''),
                 'action': self.symlink and 'symlinked' or 'copied',
                 'destination': (destination_path and " to '%s'"
                                 % destination_path or ''),
-                'unmodified': (self.unmodified_files and ', %s unmodified'
+                'unmodified': (collected['unmodified'] and ', %s unmodified'
                                % unmodified_count or ''),
+                'post_processed': (collected['post_processed'] and
+                                   ', %s post-processed'
+                                   % post_processed_count or ''),
             }
             self.stdout.write(smart_str(summary))
@@ -180,21 +207,20 @@ Type 'yes' to continue, or 'no' to cancel: """
                 self.clear_dir(os.path.join(path, d))
 
     def delete_file(self, path, prefixed_path, source_storage):
-        # Whether we are in symlink mode
+        # Checks if the target file should be deleted if it already exists
         if self.storage.exists(prefixed_path):
             try:
                 # When was the target file modified last time?
                 target_last_modified = \
                     self.storage.modified_time(prefixed_path)
-            except (OSError, NotImplementedError):
+            except (OSError, NotImplementedError, AttributeError):
+                # The storage doesn't support ``modified_time`` or failed
                 pass
             else:
                 try:
                     # When was the source file modified last time?
                     source_last_modified = source_storage.modified_time(path)
-                except (OSError, NotImplementedError):
+                except (OSError, NotImplementedError, AttributeError):
                     pass
                 else:
                     # The full path of the target file

django/contrib/staticfiles/storage.py

@@ -4,7 +4,7 @@ import os
 import posixpath
 import re
 from urllib import unquote
-from urlparse import urlsplit, urlunsplit
+from urlparse import urlsplit, urlunsplit, urldefrag
 
 from django.conf import settings
 from django.core.cache import (get_cache, InvalidCacheBackendError,
@@ -12,10 +12,10 @@ from django.core.cache import (get_cache, InvalidCacheBackendError,
 from django.core.exceptions import ImproperlyConfigured
 from django.core.files.base import ContentFile
 from django.core.files.storage import FileSystemStorage, get_storage_class
+from django.utils.datastructures import SortedDict
 from django.utils.encoding import force_unicode, smart_str
 from django.utils.functional import LazyObject
 from django.utils.importlib import import_module
-from django.utils.datastructures import SortedDict
 
 from django.contrib.staticfiles.utils import check_settings, matches_patterns
@@ -75,7 +75,7 @@ class CachedFilesMixin(object):
         try:
             content = self.open(clean_name)
         except IOError:
-            # Handle directory paths
+            # Handle directory paths and fragments
             return name
         path, filename = os.path.split(clean_name)
         root, ext = os.path.splitext(filename)
@@ -102,16 +102,31 @@ class CachedFilesMixin(object):
         Returns the real URL in DEBUG mode.
         """
         if settings.DEBUG and not force:
-            hashed_name = name
+            hashed_name, fragment = name, ''
         else:
+            clean_name, fragment = urldefrag(name)
             cache_key = self.cache_key(name)
             hashed_name = self.cache.get(cache_key)
             if hashed_name is None:
-                hashed_name = self.hashed_name(name).replace('\\', '/')
+                hashed_name = self.hashed_name(clean_name).replace('\\', '/')
                 # set the cache if there was a miss
                 # (e.g. if cache server goes down)
                 self.cache.set(cache_key, hashed_name)
-        return unquote(super(CachedFilesMixin, self).url(hashed_name))
+
+        final_url = super(CachedFilesMixin, self).url(hashed_name)
+
+        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
+        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
+        query_fragment = '?#' in name  # [sic!]
+        if fragment or query_fragment:
+            urlparts = list(urlsplit(final_url))
+            if fragment and not urlparts[4]:
+                urlparts[4] = fragment
+            if query_fragment and not urlparts[3]:
+                urlparts[2] += '?'
+            final_url = urlunsplit(urlparts)
+
+        return unquote(final_url)
 
     def url_converter(self, name):
         """
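For reference, the stdlib behaviour the new url() relies on: urldefrag() splits at the first '#', so the cache lookup uses a clean name, and in urlsplit() results index 3 is the query and index 4 the fragment — the urlparts[3]/urlparts[4] checks above. A short demonstration (the hashed filename is made up; the printed values are what Python 2's urlparse actually returns):

from urlparse import urldefrag, urlsplit

print urldefrag('myfont.eot#iefix')   # ('myfont.eot', 'iefix')
print urldefrag('myfont.eot?#iefix')  # ('myfont.eot?', 'iefix')

# (scheme, netloc, path, query, fragment)
print list(urlsplit('/static/myfont.deadbeef1234.eot?#iefix'))
# ['', '', '/static/myfont.deadbeef1234.eot', '', 'iefix']

Note that the bare '?' of the @font-face hack survives as neither query nor fragment, which is why the code appends it back onto the path when query_fragment is set.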
@@ -124,8 +139,9 @@ class CachedFilesMixin(object):
             of the storage.
             """
             matched, url = matchobj.groups()
-            # Completely ignore http(s) prefixed URLs
-            if url.startswith(('#', 'http', 'https', 'data:')):
+            # Completely ignore http(s) prefixed URLs,
+            # fragments and data-uri URLs
+            if url.startswith(('#', 'http:', 'https:', 'data:')):
                 return matched
             name_parts = name.split(os.sep)
             # Using posix normpath here to remove duplicates
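The trailing colons in the rewritten prefix tuple are the actual fix here: str.startswith() accepts a tuple of prefixes, so the old bare 'http' entry also matched any local asset whose name merely begins with those letters (and made the 'https' entry redundant). A quick illustration with a made-up file name:

url = 'httpd-icon.png'  # hypothetical local asset, not an absolute URL

print url.startswith(('#', 'http', 'https', 'data:'))    # True  -> wrongly skipped
print url.startswith(('#', 'http:', 'https:', 'data:'))  # False -> now converted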
@@ -146,6 +162,7 @@ class CachedFilesMixin(object):
                 start, end = 1, sub_level - 1
             joined_result = '/'.join(name_parts[:-start] + url_parts[end:])
             hashed_url = self.url(unquote(joined_result), force=True)
+
             # Return the hashed and normalized version to the file
             return 'url("%s")' % unquote(hashed_url)
         return converter
@@ -153,50 +170,72 @@ class CachedFilesMixin(object):
     def post_process(self, paths, dry_run=False, **options):
         """
         Post process the given list of files (called from collectstatic).
+
+        Processing is actually two separate operations:
+
+        1. renaming files to include a hash of their content for cache-busting,
+           and copying those files to the target storage.
+        2. adjusting files which contain references to other files so they
+           refer to the cache-busting filenames.
+
+        If either of these are performed on a file, then that file is considered
+        post-processed.
         """
-        processed_files = []
         # don't even dare to process the files if we're in dry run mode
         if dry_run:
-            return processed_files
+            return
 
         # delete cache of all handled paths
         self.cache.delete_many([self.cache_key(path) for path in paths])
 
-        # only try processing the files we have patterns for
+        # build a list of adjustable files
         matches = lambda path: matches_patterns(path, self._patterns.keys())
-        processing_paths = [path for path in paths if matches(path)]
+        adjustable_paths = [path for path in paths if matches(path)]
 
         # then sort the files by the directory level
         path_level = lambda name: len(name.split(os.sep))
-        for name in sorted(paths, key=path_level, reverse=True):
+        for name in sorted(paths.keys(), key=path_level, reverse=True):
 
-            # first get a hashed name for the given file
-            hashed_name = self.hashed_name(name)
+            # use the original, local file, not the copied-but-unprocessed
+            # file, which might be somewhere far away, like S3
+            with paths[name] as original_file:
 
-            with self.open(name) as original_file:
-                # then get the original's file content
-                content = original_file.read()
+                # generate the hash with the original content, even for
+                # adjustable files.
+                hashed_name = self.hashed_name(name, original_file)
 
-            # to apply each replacement pattern on the content
-            if name in processing_paths:
+                # then get the original's file content..
+                if hasattr(original_file, 'seek'):
+                    original_file.seek(0)
+
+                hashed_file_exists = self.exists(hashed_name)
+                processed = False
+
+                # ..to apply each replacement pattern to the content
+                if name in adjustable_paths:
+                    content = original_file.read()
                     converter = self.url_converter(name)
                     for patterns in self._patterns.values():
                         for pattern in patterns:
                             content = pattern.sub(converter, content)
-
-            # then save the processed result
-            if self.exists(hashed_name):
-                self.delete(hashed_name)
-
-            content_file = ContentFile(smart_str(content))
-            saved_name = self._save(hashed_name, content_file)
-            hashed_name = force_unicode(saved_name.replace('\\', '/'))
-            processed_files.append(hashed_name)
+                    if hashed_file_exists:
+                        self.delete(hashed_name)
+                    # then save the processed result
+                    content_file = ContentFile(smart_str(content))
+                    saved_name = self._save(hashed_name, content_file)
+                    hashed_name = force_unicode(saved_name.replace('\\', '/'))
+                    processed = True
+                else:
+                    # or handle the case in which neither processing nor
+                    # a change to the original file happened
+                    if not hashed_file_exists:
+                        processed = True
+                        saved_name = self._save(hashed_name, original_file)
+                        hashed_name = force_unicode(saved_name.replace('\\', '/'))
 
                 # and then set the cache accordingly
                 self.cache.set(self.cache_key(name), hashed_name)
 
-        return processed_files
+                yield name, hashed_name, processed
 
 
 class CachedStaticFilesStorage(CachedFilesMixin, StaticFilesStorage):
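For context on the names generated throughout this hunk: hashed_name() (its unchanged body is not shown in this diff) splices a digest of the file's content into the filename, which is what makes the rename cache-busting. A rough standalone sketch of the idea — an approximation, not Django's exact implementation:

import hashlib
import os

def content_hashed_name(name, content):
    # approximation of CachedFilesMixin.hashed_name(): put a short
    # content digest between the file root and its extension
    path, filename = os.path.split(name)
    root, ext = os.path.splitext(filename)
    digest = hashlib.md5(content).hexdigest()[:12]
    return os.path.join(path, '%s.%s%s' % (root, digest, ext))

print content_hashed_name('css/styles.css', 'body { color: red; }')
# -> css/styles.<12 hex chars>.css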