diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..49cb19117c --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Normalize line endings to avoid spurious failures in the core test suite on Windows. +*html text eol=lf \ No newline at end of file diff --git a/AUTHORS b/AUTHORS index 5799b941ff..0f793cc5f4 100644 --- a/AUTHORS +++ b/AUTHORS @@ -91,6 +91,7 @@ answer newbie questions, and generally made Django that much better: James Bennett Danilo Bargen Shai Berger + berto Julian Bez Arvis Bickovskis Natalia Bidart @@ -231,6 +232,7 @@ answer newbie questions, and generally made Django that much better: Simon Greenhill Owen Griffiths Espen Grindhaug + Mike Grouchy Janos Guljas Thomas Güttler Horst Gutmann @@ -380,6 +382,7 @@ answer newbie questions, and generally made Django that much better: Christian Metts michal@plovarna.cz Slawek Mikula + Katie Miller Shawn Milochik mitakummaa@gmail.com Taylor Mitchell @@ -510,6 +513,7 @@ answer newbie questions, and generally made Django that much better: Johan C. Stöver Nowell Strite Thomas Stromberg + Ben Sturmfels Travis Swicegood Pascal Varet SuperJared @@ -528,6 +532,7 @@ answer newbie questions, and generally made Django that much better: Terry Huang Travis Terry thebjorn + Lowe Thiderman Zach Thompson Michael Thornhill Deepak Thukral @@ -585,6 +590,7 @@ answer newbie questions, and generally made Django that much better: Gasper Zejn Jarek Zgoda Cheng Zhang + Hannes Struß A big THANK YOU goes to: diff --git a/django/__init__.py b/django/__init__.py index 32e1374765..873c328add 100644 --- a/django/__init__.py +++ b/django/__init__.py @@ -1,4 +1,4 @@ -VERSION = (1, 5, 0, 'alpha', 0) +VERSION = (1, 6, 0, 'alpha', 0) def get_version(*args, **kwargs): # Don't litter django/__init__.py with all the get_version stuff. diff --git a/django/bin/daily_cleanup.py b/django/bin/daily_cleanup.py index c9f4cb905c..ac3de00f2c 100755 --- a/django/bin/daily_cleanup.py +++ b/django/bin/daily_cleanup.py @@ -7,7 +7,13 @@ Can be run as a cronjob to clean out old data from the database (only expired sessions at the moment). """ +import warnings + from django.core import management if __name__ == "__main__": - management.call_command('cleanup') + warnings.warn( + "The `daily_cleanup` script has been deprecated " + "in favor of `django-admin.py clearsessions`.", + PendingDeprecationWarning) + management.call_command('clearsessions') diff --git a/django/conf/__init__.py b/django/conf/__init__.py index 1804c851bf..b00c8d5046 100644 --- a/django/conf/__init__.py +++ b/django/conf/__init__.py @@ -6,6 +6,7 @@ variable, and then from django.conf.global_settings; see the global settings fil a list of all possible variables. """ +import logging import os import time # Needed for Windows import warnings @@ -55,6 +56,15 @@ class LazySettings(LazyObject): """ Setup logging from LOGGING_CONFIG and LOGGING settings. """ + try: + # Route warnings through python logging + logging.captureWarnings(True) + # Allow DeprecationWarnings through the warnings filters + warnings.simplefilter("default", DeprecationWarning) + except AttributeError: + # No captureWarnings on Python 2.6, DeprecationWarnings are on anyway + pass + if self.LOGGING_CONFIG: from django.utils.log import DEFAULT_LOGGING # First find the logging configuration function ... 
@@ -83,6 +93,7 @@ class LazySettings(LazyObject): for name, value in options.items(): setattr(holder, name, value) self._wrapped = holder + self._configure_logging() @property def configured(self): @@ -99,9 +110,6 @@ class BaseSettings(object): def __setattr__(self, name, value): if name in ("MEDIA_URL", "STATIC_URL") and value and not value.endswith('/'): raise ImproperlyConfigured("If set, %s must end with a slash" % name) - elif name == "ADMIN_MEDIA_PREFIX": - warnings.warn("The ADMIN_MEDIA_PREFIX setting has been removed; " - "use STATIC_URL instead.", DeprecationWarning) elif name == "ALLOWED_INCLUDE_ROOTS" and isinstance(value, six.string_types): raise ValueError("The ALLOWED_INCLUDE_ROOTS setting must be set " "to a tuple, not a string.") diff --git a/django/conf/global_settings.py b/django/conf/global_settings.py index f1cbb22880..c533efc41c 100644 --- a/django/conf/global_settings.py +++ b/django/conf/global_settings.py @@ -150,12 +150,8 @@ SERVER_EMAIL = 'root@localhost' # Whether to send broken-link emails. SEND_BROKEN_LINK_EMAILS = False -# Database connection info. -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.dummy', - }, -} +# Database connection info. If left empty, will default to the dummy backend. +DATABASES = {} # Classes used to implement DB routing behavior. DATABASE_ROUTERS = [] @@ -449,6 +445,7 @@ MIDDLEWARE_CLASSES = ( # SESSIONS # ############ +SESSION_CACHE_ALIAS = 'default' # Cache to store session data if using the cache session backend. SESSION_COOKIE_NAME = 'sessionid' # Cookie name. This can be whatever you want. SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks). SESSION_COOKIE_DOMAIN = None # A string like ".example.com", or None for standard domain cookie. diff --git a/django/conf/locale/__init__.py b/django/conf/locale/__init__.py index 93e98194a4..45e56b6d19 100644 --- a/django/conf/locale/__init__.py +++ b/django/conf/locale/__init__.py @@ -1,5 +1,8 @@ from __future__ import unicode_literals +# About name_local: capitalize it as if your language name was appearing +# inside a sentence in your language. 
+ LANG_INFO = { 'ar': { 'bidi': True, @@ -53,7 +56,7 @@ LANG_INFO = { 'bidi': False, 'code': 'da', 'name': 'Danish', - 'name_local': 'Dansk', + 'name_local': 'dansk', }, 'de': { 'bidi': False, @@ -137,7 +140,7 @@ LANG_INFO = { 'bidi': False, 'code': 'fr', 'name': 'French', - 'name_local': 'Fran\xe7ais', + 'name_local': 'fran\xe7ais', }, 'fy-nl': { 'bidi': False, @@ -269,7 +272,7 @@ LANG_INFO = { 'bidi': False, 'code': 'nb', 'name': 'Norwegian Bokmal', - 'name_local': 'Norsk (bokm\xe5l)', + 'name_local': 'norsk (bokm\xe5l)', }, 'ne': { 'bidi': False, @@ -287,13 +290,13 @@ LANG_INFO = { 'bidi': False, 'code': 'nn', 'name': 'Norwegian Nynorsk', - 'name_local': 'Norsk (nynorsk)', + 'name_local': 'norsk (nynorsk)', }, 'no': { 'bidi': False, 'code': 'no', 'name': 'Norwegian', - 'name_local': 'Norsk', + 'name_local': 'norsk', }, 'pa': { 'bidi': False, @@ -365,7 +368,7 @@ LANG_INFO = { 'bidi': False, 'code': 'sv', 'name': 'Swedish', - 'name_local': 'Svenska', + 'name_local': 'svenska', }, 'sw': { 'bidi': False, diff --git a/django/conf/locale/ca/formats.py b/django/conf/locale/ca/formats.py index 9a0054d0fb..b9431b5b67 100644 --- a/django/conf/locale/ca/formats.py +++ b/django/conf/locale/ca/formats.py @@ -19,10 +19,6 @@ DATE_INPUT_FORMATS = ( # '31/12/2009', '31/12/09' '%d/%m/%Y', '%d/%m/%y' ) -TIME_INPUT_FORMATS = ( - # '14:30:59', '14:30' - '%H:%M:%S', '%H:%M' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', '%d/%m/%Y %H:%M', diff --git a/django/conf/locale/cs/formats.py b/django/conf/locale/cs/formats.py index f0b674227b..ea28718f57 100644 --- a/django/conf/locale/cs/formats.py +++ b/django/conf/locale/cs/formats.py @@ -17,21 +17,26 @@ FIRST_DAY_OF_WEEK = 1 # Monday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( - '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' + '%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06' + '%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06' # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006' ) +# Kept ISO formats as one is in first position TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' + '%H:%M:%S', # '04:30:59' + '%H.%M', # '04.30' + '%H:%M', # '04:30' ) DATETIME_INPUT_FORMATS = ( - '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' - '%d.%m.%Y %H:%M', # '25.10.2006 14:30' - '%d.%m.%Y', # '25.10.2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' + '%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59' + '%d.%m.%Y %H.%M', # '05.01.2006 04.30' + '%d.%m.%Y %H:%M', # '05.01.2006 04:30' + '%d.%m.%Y', # '05.01.2006' + '%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59' + '%d. %m. %Y %H.%M', # '05. 01. 2006 04.30' + '%d. %m. %Y %H:%M', # '05. 01. 2006 04:30' + '%d. %m. %Y', # '05. 01. 
2006' + '%Y-%m-%d %H.%M', # '2006-01-05 04.30' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '\xa0' # non-breaking space diff --git a/django/conf/locale/da/formats.py b/django/conf/locale/da/formats.py index f6cdea8b59..9022eb2ed4 100644 --- a/django/conf/locale/da/formats.py +++ b/django/conf/locale/da/formats.py @@ -18,10 +18,6 @@ FIRST_DAY_OF_WEEK = 1 DATE_INPUT_FORMATS = ( '%d.%m.%Y', # '25.10.2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' diff --git a/django/conf/locale/de/formats.py b/django/conf/locale/de/formats.py index 79d555b989..a75b806acb 100644 --- a/django/conf/locale/de/formats.py +++ b/django/conf/locale/de/formats.py @@ -17,20 +17,12 @@ FIRST_DAY_OF_WEEK = 1 # Monday # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' '%d.%m.%Y', # '25.10.2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' diff --git a/django/conf/locale/de_CH/formats.py b/django/conf/locale/de_CH/formats.py index 7cbf76e7db..667d1eced0 100644 --- a/django/conf/locale/de_CH/formats.py +++ b/django/conf/locale/de_CH/formats.py @@ -19,20 +19,12 @@ FIRST_DAY_OF_WEEK = 1 # Monday # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' '%d.%m.%Y', # '25.10.2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) # these are the separators for non-monetary numbers. 
For monetary numbers, diff --git a/django/conf/locale/en/formats.py b/django/conf/locale/en/formats.py index 6cf2335f5e..6bd693e60e 100644 --- a/django/conf/locale/en/formats.py +++ b/django/conf/locale/en/formats.py @@ -15,6 +15,7 @@ FIRST_DAY_OF_WEEK = 0 # Sunday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06' # '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006' @@ -22,10 +23,6 @@ DATE_INPUT_FORMATS = ( # '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006' # '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/en_GB/formats.py b/django/conf/locale/en_GB/formats.py index ea9a3f6303..b594aafb74 100644 --- a/django/conf/locale/en_GB/formats.py +++ b/django/conf/locale/en_GB/formats.py @@ -17,16 +17,11 @@ FIRST_DAY_OF_WEEK = 0 # Sunday # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06' - '%Y-%m-%d', # '2006-10-25' # '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006' # '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006' # '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006' # '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/es/formats.py b/django/conf/locale/es/formats.py index f10351c649..6241158338 100644 --- a/django/conf/locale/es/formats.py +++ b/django/conf/locale/es/formats.py @@ -19,10 +19,6 @@ DATE_INPUT_FORMATS = ( # '31/12/2009', '31/12/09' '%d/%m/%Y', '%d/%m/%y' ) -TIME_INPUT_FORMATS = ( - # '14:30:59', '14:30' - '%H:%M:%S', '%H:%M' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', '%d/%m/%Y %H:%M', diff --git a/django/conf/locale/es_AR/formats.py b/django/conf/locale/es_AR/formats.py index e28841f90a..651690bfdf 100644 --- a/django/conf/locale/es_AR/formats.py +++ b/django/conf/locale/es_AR/formats.py @@ -19,10 +19,6 @@ DATE_INPUT_FORMATS = ( '%d/%m/%Y', # '31/12/2009' '%d/%m/%y', # '31/12/09' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', '%d/%m/%Y %H:%M', diff --git a/django/conf/locale/es_MX/formats.py b/django/conf/locale/es_MX/formats.py index cef6b4e2f9..e0e6d7f16d 100644 --- a/django/conf/locale/es_MX/formats.py +++ b/django/conf/locale/es_MX/formats.py @@ -15,9 +15,6 @@ DATE_INPUT_FORMATS = ( '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06' '%Y%m%d', # '20061025' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', '%H:%M', # '14:30:59', '14:30' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', '%d/%m/%Y %H:%M', diff --git a/django/conf/locale/es_NI/formats.py b/django/conf/locale/es_NI/formats.py index fd5cc9a925..a22432a16a 100644 --- a/django/conf/locale/es_NI/formats.py +++ b/django/conf/locale/es_NI/formats.py @@ -15,9 +15,6 @@ DATE_INPUT_FORMATS = ( '%Y%m%d', # '20061025' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', '%H:%M', # '14:30:59', '14:30' -) 
DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', '%d/%m/%Y %H:%M', diff --git a/django/conf/locale/fr/formats.py b/django/conf/locale/fr/formats.py index 3b5e8861d8..1ef7ecb242 100644 --- a/django/conf/locale/fr/formats.py +++ b/django/conf/locale/fr/formats.py @@ -19,13 +19,8 @@ FIRST_DAY_OF_WEEK = 1 # Monday DATE_INPUT_FORMATS = ( '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06' '%d.%m.%Y', '%d.%m.%y', # Swiss (fr_CH), '25.10.2006', '25.10.06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' # '%d %B %Y', '%d %b %Y', # '25 octobre 2006', '25 oct. 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59' '%d/%m/%Y %H:%M', # '25/10/2006 14:30' @@ -33,9 +28,6 @@ DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # Swiss (fr_CH), '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # Swiss (fr_CH), '25.10.2006 14:30' '%d.%m.%Y', # Swiss (fr_CH), '25.10.2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '\xa0' # non-breaking space diff --git a/django/conf/locale/hr/formats.py b/django/conf/locale/hr/formats.py index 72fcf51fc7..9f4c74051e 100644 --- a/django/conf/locale/hr/formats.py +++ b/django/conf/locale/hr/formats.py @@ -15,15 +15,12 @@ FIRST_DAY_OF_WEEK = 1 # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', # '2006-10-25' '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.' '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/it/formats.py b/django/conf/locale/it/formats.py index 330ef4e150..de81fa6cdf 100644 --- a/django/conf/locale/it/formats.py +++ b/django/conf/locale/it/formats.py @@ -20,10 +20,6 @@ DATE_INPUT_FORMATS = ( '%d-%m-%Y', '%Y-%m-%d', # '25-10-2006', '2008-10-25' '%d-%m-%y', '%d/%m/%y', # '25-10-06', '25/10/06' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59' '%d/%m/%Y %H:%M', # '25/10/2006 14:30' diff --git a/django/conf/locale/ka/formats.py b/django/conf/locale/ka/formats.py index 5117e4aa02..c3552e0661 100644 --- a/django/conf/locale/ka/formats.py +++ b/django/conf/locale/ka/formats.py @@ -15,16 +15,13 @@ FIRST_DAY_OF_WEEK = 1 # (Monday) # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06' # '%d %b %Y', '%d %b, %Y', '%d %b. %Y', # '25 Oct 2006', '25 Oct, 2006', '25 Oct. 
2006' # '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006' # '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/ko/formats.py b/django/conf/locale/ko/formats.py index 3c0431e4bf..b794afe900 100644 --- a/django/conf/locale/ko/formats.py +++ b/django/conf/locale/ko/formats.py @@ -16,6 +16,7 @@ SHORT_DATETIME_FORMAT = 'Y-n-j H:i' # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06' # '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006' diff --git a/django/conf/locale/lv/formats.py b/django/conf/locale/lv/formats.py index e4ef524adf..f115a6e487 100644 --- a/django/conf/locale/lv/formats.py +++ b/django/conf/locale/lv/formats.py @@ -16,6 +16,7 @@ FIRST_DAY_OF_WEEK = 1 #Monday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06' ) diff --git a/django/conf/locale/mk/formats.py b/django/conf/locale/mk/formats.py index e217e655e6..44feb512bf 100644 --- a/django/conf/locale/mk/formats.py +++ b/django/conf/locale/mk/formats.py @@ -18,12 +18,6 @@ FIRST_DAY_OF_WEEK = 1 DATE_INPUT_FORMATS = ( '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.' '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.' - '%Y-%m-%d', # '2006-10-25' -) - -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' ) DATETIME_INPUT_FORMATS = ( @@ -39,9 +33,6 @@ DATETIME_INPUT_FORMATS = ( '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59' '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30' '%d. %m. %y.', # '25. 10. 06.' 
- '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' diff --git a/django/conf/locale/ml/formats.py b/django/conf/locale/ml/formats.py index 6cf2335f5e..6bd693e60e 100644 --- a/django/conf/locale/ml/formats.py +++ b/django/conf/locale/ml/formats.py @@ -15,6 +15,7 @@ FIRST_DAY_OF_WEEK = 0 # Sunday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06' # '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006' @@ -22,10 +23,6 @@ DATE_INPUT_FORMATS = ( # '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006' # '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/nb/formats.py b/django/conf/locale/nb/formats.py index 4a896dd80d..9a009dc03a 100644 --- a/django/conf/locale/nb/formats.py +++ b/django/conf/locale/nb/formats.py @@ -16,22 +16,17 @@ FIRST_DAY_OF_WEEK = 1 # Monday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06' - '%Y-%m-%d', # '2006-10-25', # '%d. %b %Y', '%d %b %Y', # '25. okt 2006', '25 okt 2006' # '%d. %b. %Y', '%d %b. %Y', # '25. okt. 2006', '25 okt. 2006' # '%d. %B %Y', '%d %B %Y', # '25. 
oktober 2006', '25 oktober 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' '%Y-%m-%d', # '2006-10-25' - '%Y-%m-%d', # '2006-10-25' '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' '%d.%m.%Y', # '25.10.2006' diff --git a/django/conf/locale/nl/formats.py b/django/conf/locale/nl/formats.py index 758737e8c9..be5a146104 100644 --- a/django/conf/locale/nl/formats.py +++ b/django/conf/locale/nl/formats.py @@ -16,10 +16,11 @@ FIRST_DAY_OF_WEEK = 1 # Monday (in Dutch 'maandag') # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( - '%d-%m-%Y', '%d-%m-%y', '%Y-%m-%d', # '20-01-2009', '20-01-09', '2009-01-20' - # '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09' - # '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 09' + '%d-%m-%Y', '%d-%m-%y', # '20-01-2009', '20-01-09' + # '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09' + # '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 09' ) +# Kept ISO formats as one is in first position TIME_INPUT_FORMATS = ( '%H:%M:%S', # '15:23:35' '%H.%M:%S', # '15.23:35' diff --git a/django/conf/locale/nn/formats.py b/django/conf/locale/nn/formats.py index 4a896dd80d..a9b8b562fc 100644 --- a/django/conf/locale/nn/formats.py +++ b/django/conf/locale/nn/formats.py @@ -16,17 +16,13 @@ FIRST_DAY_OF_WEEK = 1 # Monday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06' - '%Y-%m-%d', # '2006-10-25', # '%d. %b %Y', '%d %b %Y', # '25. okt 2006', '25 okt 2006' # '%d. %b. %Y', '%d %b. %Y', # '25. okt. 2006', '25 okt. 2006' # '%d. %B %Y', '%d %B %Y', # '25. oktober 2006', '25 oktober 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/pl/formats.py b/django/conf/locale/pl/formats.py index 021063d474..064e75f1ee 100644 --- a/django/conf/locale/pl/formats.py +++ b/django/conf/locale/pl/formats.py @@ -18,20 +18,13 @@ FIRST_DAY_OF_WEEK = 1 # Monday # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' + '%y-%m-%d', # '06-10-25' # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 
2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' '%d.%m.%Y', # '25.10.2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = ' ' diff --git a/django/conf/locale/pt/formats.py b/django/conf/locale/pt/formats.py index a6ce2c08de..2d6ca69647 100644 --- a/django/conf/locale/pt/formats.py +++ b/django/conf/locale/pt/formats.py @@ -15,15 +15,12 @@ FIRST_DAY_OF_WEEK = 0 # Sunday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%d/%m/%Y', '%d/%m/%y', # '2006-10-25', '25/10/2006', '25/10/06' # '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006' # '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/pt_BR/formats.py b/django/conf/locale/pt_BR/formats.py index 6d9edeecce..7a6e0877c3 100644 --- a/django/conf/locale/pt_BR/formats.py +++ b/django/conf/locale/pt_BR/formats.py @@ -17,14 +17,10 @@ FIRST_DAY_OF_WEEK = 0 # Sunday # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( - '%d/%m/%Y', '%d/%m/%y', '%Y-%m-%d', # '25/10/2006', '25/10/06', '2006-10-25' + '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06' # '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006' # '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59' '%d/%m/%Y %H:%M', # '25/10/2006 14:30' @@ -32,9 +28,6 @@ DATETIME_INPUT_FORMATS = ( '%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59' '%d/%m/%y %H:%M', # '25/10/06 14:30' '%d/%m/%y', # '25/10/06' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' 
diff --git a/django/conf/locale/ru/formats.py b/django/conf/locale/ru/formats.py index ec46bff400..d692e67833 100644 --- a/django/conf/locale/ru/formats.py +++ b/django/conf/locale/ru/formats.py @@ -19,12 +19,7 @@ FIRST_DAY_OF_WEEK = 1 # Monday DATE_INPUT_FORMATS = ( '%d.%m.%Y', # '25.10.2006' '%d.%m.%y', # '25.10.06' - '%Y-%m-%d', # '2006-10-25' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' @@ -32,9 +27,6 @@ DATETIME_INPUT_FORMATS = ( '%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59' '%d.%m.%y %H:%M', # '25.10.06 14:30' '%d.%m.%y', # '25.10.06' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '\xa0' # non-breaking space diff --git a/django/conf/locale/sk/formats.py b/django/conf/locale/sk/formats.py index 4b2201f49a..85e2cd8af9 100644 --- a/django/conf/locale/sk/formats.py +++ b/django/conf/locale/sk/formats.py @@ -18,20 +18,13 @@ FIRST_DAY_OF_WEEK = 1 # Monday # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' + '%y-%m-%d', # '06-10-25' # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' '%d.%m.%Y', # '25.10.2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '\xa0' # non-breaking space diff --git a/django/conf/locale/sl/formats.py b/django/conf/locale/sl/formats.py index f019323bcd..0d6137e1ed 100644 --- a/django/conf/locale/sl/formats.py +++ b/django/conf/locale/sl/formats.py @@ -21,11 +21,6 @@ DATE_INPUT_FORMATS = ( '%d. %m. %Y', '%d. %m. %y', # '25. 10. 2006', '25. 10. 06' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) - DATETIME_INPUT_FORMATS = ( '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%d.%m.%Y %H:%M', # '25.10.2006 14:30' diff --git a/django/conf/locale/sr/formats.py b/django/conf/locale/sr/formats.py index 5e05832f7b..227f20d723 100644 --- a/django/conf/locale/sr/formats.py +++ b/django/conf/locale/sr/formats.py @@ -18,15 +18,10 @@ FIRST_DAY_OF_WEEK = 1 DATE_INPUT_FORMATS = ( '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.' '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.' - '%Y-%m-%d', # '2006-10-25' # '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.' # '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.' # '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59' '%d.%m.%Y. %H:%M', # '25.10.2006. 14:30' @@ -40,9 +35,6 @@ DATETIME_INPUT_FORMATS = ( '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59' '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30' '%d. %m. %y.', # '25. 10. 06.' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' 
diff --git a/django/conf/locale/sr_Latn/formats.py b/django/conf/locale/sr_Latn/formats.py index 5e05832f7b..227f20d723 100644 --- a/django/conf/locale/sr_Latn/formats.py +++ b/django/conf/locale/sr_Latn/formats.py @@ -18,15 +18,10 @@ FIRST_DAY_OF_WEEK = 1 DATE_INPUT_FORMATS = ( '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.' '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.' - '%Y-%m-%d', # '2006-10-25' # '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.' # '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.' # '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59' '%d.%m.%Y. %H:%M', # '25.10.2006. 14:30' @@ -40,9 +35,6 @@ DATETIME_INPUT_FORMATS = ( '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59' '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30' '%d. %m. %y.', # '25. 10. 06.' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' diff --git a/django/conf/locale/sv/formats.py b/django/conf/locale/sv/formats.py index 767dbe8d3e..0f52c2c1a5 100644 --- a/django/conf/locale/sv/formats.py +++ b/django/conf/locale/sv/formats.py @@ -16,15 +16,12 @@ FIRST_DAY_OF_WEEK = 1 # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior +# Kept ISO formats as they are in first position DATE_INPUT_FORMATS = ( '%Y-%m-%d', # '2006-10-25' '%m/%d/%Y', # '10/25/2006' '%m/%d/%y', # '10/25/06' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' diff --git a/django/conf/locale/tr/formats.py b/django/conf/locale/tr/formats.py index 5fb2e42f09..705b2ed659 100644 --- a/django/conf/locale/tr/formats.py +++ b/django/conf/locale/tr/formats.py @@ -17,20 +17,13 @@ FIRST_DAY_OF_WEEK = 1 # Pazartesi # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = ( '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06' - '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25' + '%y-%m-%d', # '06-10-25' # '%d %B %Y', '%d %b. %Y', # '25 Ekim 2006', '25 Eki. 2006' ) -TIME_INPUT_FORMATS = ( - '%H:%M:%S', # '14:30:59' - '%H:%M', # '14:30' -) DATETIME_INPUT_FORMATS = ( '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59' '%d/%m/%Y %H:%M', # '25/10/2006 14:30' '%d/%m/%Y', # '25/10/2006' - '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' - '%Y-%m-%d %H:%M', # '2006-10-25 14:30' - '%Y-%m-%d', # '2006-10-25' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' diff --git a/django/conf/project_template/project_name/settings.py b/django/conf/project_template/project_name/settings.py index 6bdaa34988..559e27ca16 100644 --- a/django/conf/project_template/project_name/settings.py +++ b/django/conf/project_template/project_name/settings.py @@ -75,7 +75,7 @@ STATICFILES_DIRS = ( STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', -# 'django.contrib.staticfiles.finders.DefaultStorageFinder', + # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. 
@@ -85,7 +85,7 @@ SECRET_KEY = '{{ secret_key }}' TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', -# 'django.template.loaders.eggs.Loader', + # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( diff --git a/django/contrib/admin/filters.py b/django/contrib/admin/filters.py index 0a34f807b3..ff66d3e3f3 100644 --- a/django/contrib/admin/filters.py +++ b/django/contrib/admin/filters.py @@ -9,7 +9,7 @@ import datetime from django.db import models from django.core.exceptions import ImproperlyConfigured, ValidationError -from django.utils.encoding import smart_text +from django.utils.encoding import smart_text, force_text from django.utils.translation import ugettext_lazy as _ from django.utils import timezone from django.contrib.admin.util import (get_model_from_relation, @@ -102,7 +102,7 @@ class SimpleListFilter(ListFilter): } for lookup, title in self.lookup_choices: yield { - 'selected': self.value() == lookup, + 'selected': self.value() == force_text(lookup), 'query_string': cl.get_query_string({ self.parameter_name: lookup, }, []), diff --git a/django/contrib/admin/forms.py b/django/contrib/admin/forms.py index f1e7076ece..1fabdce245 100644 --- a/django/contrib/admin/forms.py +++ b/django/contrib/admin/forms.py @@ -6,8 +6,8 @@ from django.contrib.auth import authenticate from django.contrib.auth.forms import AuthenticationForm from django.utils.translation import ugettext_lazy -ERROR_MESSAGE = ugettext_lazy("Please enter the correct username and password " - "for a staff account. Note that both fields are case-sensitive.") +ERROR_MESSAGE = ugettext_lazy("Please enter the correct %(username)s and password " + "for a staff account. Note that both fields may be case-sensitive.") class AdminAuthenticationForm(AuthenticationForm): @@ -26,8 +26,12 @@ class AdminAuthenticationForm(AuthenticationForm): if username and password: self.user_cache = authenticate(username=username, password=password) if self.user_cache is None: - raise forms.ValidationError(message) + raise forms.ValidationError(message % { + 'username': self.username_field.verbose_name + }) elif not self.user_cache.is_active or not self.user_cache.is_staff: - raise forms.ValidationError(message) + raise forms.ValidationError(message % { + 'username': self.username_field.verbose_name + }) self.check_for_test_cookie() return self.cleaned_data diff --git a/django/contrib/admin/helpers.py b/django/contrib/admin/helpers.py index 90370bd978..4203287123 100644 --- a/django/contrib/admin/helpers.py +++ b/django/contrib/admin/helpers.py @@ -186,9 +186,7 @@ class AdminReadonlyField(object): if getattr(attr, "allow_tags", False): result_repr = mark_safe(result_repr) else: - if value is None: - result_repr = EMPTY_CHANGELIST_VALUE - elif isinstance(f.rel, ManyToManyRel): + if isinstance(f.rel, ManyToManyRel) and value is not None: result_repr = ", ".join(map(six.text_type, value.all())) else: result_repr = display_for_field(value, f) diff --git a/django/contrib/admin/models.py b/django/contrib/admin/models.py index e1d3b40d01..b697d7bdc8 100644 --- a/django/contrib/admin/models.py +++ b/django/contrib/admin/models.py @@ -4,7 +4,7 @@ from django.db import models from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.contrib.admin.util import quote -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ugettext, ugettext_lazy as _ from django.utils.encoding import smart_text from 
django.utils.encoding import python_2_unicode_compatible @@ -42,13 +42,16 @@ class LogEntry(models.Model): def __str__(self): if self.action_flag == ADDITION: - return _('Added "%(object)s".') % {'object': self.object_repr} + return ugettext('Added "%(object)s".') % {'object': self.object_repr} elif self.action_flag == CHANGE: - return _('Changed "%(object)s" - %(changes)s') % {'object': self.object_repr, 'changes': self.change_message} + return ugettext('Changed "%(object)s" - %(changes)s') % { + 'object': self.object_repr, + 'changes': self.change_message, + } elif self.action_flag == DELETION: - return _('Deleted "%(object)s."') % {'object': self.object_repr} + return ugettext('Deleted "%(object)s."') % {'object': self.object_repr} - return _('LogEntry Object') + return ugettext('LogEntry Object') def is_addition(self): return self.action_flag == ADDITION diff --git a/django/contrib/admin/options.py b/django/contrib/admin/options.py index 19c212db9a..1827d40159 100644 --- a/django/contrib/admin/options.py +++ b/django/contrib/admin/options.py @@ -1,3 +1,4 @@ +import copy from functools import update_wrapper, partial import warnings @@ -130,7 +131,7 @@ class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)): # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: - kwargs = dict(self.formfield_overrides[klass], **kwargs) + kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs) return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. @@ -407,8 +408,6 @@ class ModelAdmin(BaseModelAdmin): js.append('actions%s.js' % extra) if self.prepopulated_fields: js.extend(['urlify.js', 'prepopulate%s.js' % extra]) - if self.opts.get_ordered_objects(): - js.extend(['getElementsBySelector.js', 'dom-drag.js' , 'admin/ordering.js']) return forms.Media(js=[static('admin/js/%s' % url) for url in js]) def get_model_perms(self, request): @@ -552,7 +551,7 @@ class ModelAdmin(BaseModelAdmin): """ from django.contrib.admin.models import LogEntry, DELETION LogEntry.objects.log_action( - user_id = request.user.id, + user_id = request.user.pk, content_type_id = ContentType.objects.get_for_model(self.model).pk, object_id = object.pk, object_repr = object_repr, @@ -665,6 +664,13 @@ class ModelAdmin(BaseModelAdmin): # Use only the first item in list_display as link return list(list_display)[:1] + def get_list_filter(self, request): + """ + Returns a sequence containing the fields to be displayed as filters in + the right sidebar of the changelist page. + """ + return self.list_filter + def construct_change_message(self, request, form, formsets): """ Construct a change message from a changed object. @@ -691,12 +697,30 @@ class ModelAdmin(BaseModelAdmin): change_message = ' '.join(change_message) return change_message or _('No fields changed.') - def message_user(self, request, message): + def message_user(self, request, message, level=messages.INFO, extra_tags='', + fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. + + Exposes almost the same API as messages.add_message(), but accepts the + positional arguments in a different order to maintain backwards + compatibility. For convenience, it accepts the `level` argument as + a string rather than the usual level number. 
""" - messages.info(request, message) + + if not isinstance(level, int): + # attempt to get the level if passed a string + try: + level = getattr(messages.constants, level.upper()) + except AttributeError: + levels = messages.constants.DEFAULT_TAGS.values() + levels_repr = ', '.join('`%s`' % l for l in levels) + raise ValueError('Bad message level string: `%s`. ' + 'Possible values are: %s' % (level, levels_repr)) + + messages.add_message(request, level, message, extra_tags=extra_tags, + fail_silently=fail_silently) def save_form(self, request, form, change): """ @@ -738,7 +762,6 @@ class ModelAdmin(BaseModelAdmin): def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label - ordered_objects = opts.get_ordered_objects() context.update({ 'add': add, 'change': change, @@ -747,7 +770,6 @@ class ModelAdmin(BaseModelAdmin): 'has_delete_permission': self.has_delete_permission(request, obj), 'has_file_field': True, # FIXME - this should check if form or formsets have a FileField, 'has_absolute_url': hasattr(self.model, 'get_absolute_url'), - 'ordered_objects': ordered_objects, 'form_url': form_url, 'opts': opts, 'content_type_id': ContentType.objects.get_for_model(self.model).id, @@ -1174,6 +1196,7 @@ class ModelAdmin(BaseModelAdmin): list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) + list_filter = self.get_list_filter(request) # Check actions to see if any are available on this changelist actions = self.get_actions(request) @@ -1184,7 +1207,7 @@ class ModelAdmin(BaseModelAdmin): ChangeList = self.get_changelist(request) try: cl = ChangeList(request, self.model, list_display, - list_display_links, self.list_filter, self.date_hierarchy, + list_display_links, list_filter, self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page, self.list_max_show_all, self.list_editable, self) diff --git a/django/contrib/admin/sites.py b/django/contrib/admin/sites.py index e375bc608f..185417015a 100644 --- a/django/contrib/admin/sites.py +++ b/django/contrib/admin/sites.py @@ -354,6 +354,7 @@ class AdminSite(object): info = (app_label, model._meta.module_name) model_dict = { 'name': capfirst(model._meta.verbose_name_plural), + 'object_name': model._meta.object_name, 'perms': perms, } if perms.get('change', False): @@ -371,6 +372,7 @@ class AdminSite(object): else: app_dict[app_label] = { 'name': app_label.title(), + 'app_label': app_label, 'app_url': reverse('admin:app_list', kwargs={'app_label': app_label}, current_app=self.name), 'has_module_perms': has_module_perms, 'models': [model_dict], @@ -389,9 +391,9 @@ class AdminSite(object): 'app_list': app_list, } context.update(extra_context or {}) - return TemplateResponse(request, [ - self.index_template or 'admin/index.html', - ], context, current_app=self.name) + return TemplateResponse(request, self.index_template or + 'admin/index.html', context, + current_app=self.name) def app_index(self, request, app_label, extra_context=None): user = request.user @@ -408,6 +410,7 @@ class AdminSite(object): info = (app_label, model._meta.module_name) model_dict = { 'name': capfirst(model._meta.verbose_name_plural), + 'object_name': model._meta.object_name, 'perms': perms, } if perms.get('change', False): @@ -428,6 +431,7 @@ class AdminSite(object): # information. 
app_dict = { 'name': app_label.title(), + 'app_label': app_label, 'app_url': '', 'has_module_perms': has_module_perms, 'models': [model_dict], diff --git a/django/contrib/admin/static/admin/css/base.css b/django/contrib/admin/static/admin/css/base.css index 5e5fc58a77..5ac4032c15 100644 --- a/django/contrib/admin/static/admin/css/base.css +++ b/django/contrib/admin/static/admin/css/base.css @@ -322,6 +322,10 @@ thead th.sorted { background: #c5c5c5 url(../img/nav-bg-selected.gif) top left repeat-x; } +thead th.sorted .text { + padding-right: 42px; +} + table thead th .text span { padding: 2px 5px; display:block; diff --git a/django/contrib/admin/static/admin/css/rtl.css b/django/contrib/admin/static/admin/css/rtl.css index 82d16024e6..ba9f1b5ad7 100644 --- a/django/contrib/admin/static/admin/css/rtl.css +++ b/django/contrib/admin/static/admin/css/rtl.css @@ -84,6 +84,11 @@ table thead th.sorted .sortoptions { float: left; } +thead th.sorted .text { + padding-right: 0; + padding-left: 42px; +} + /* dashboard styles */ .dashboard .module table td a { diff --git a/django/contrib/admin/static/admin/css/widgets.css b/django/contrib/admin/static/admin/css/widgets.css index 0a7012c7b2..3b19353e6f 100644 --- a/django/contrib/admin/static/admin/css/widgets.css +++ b/django/contrib/admin/static/admin/css/widgets.css @@ -225,6 +225,21 @@ table p.datetime { padding-left: 0; } +/* URL */ + +p.url { + line-height: 20px; + margin: 0; + padding: 0; + color: #666; + font-size: 11px; + font-weight: bold; +} + +.url a { + font-weight: normal; +} + /* FILE UPLOADS */ p.file-upload { diff --git a/django/contrib/admin/static/admin/js/admin/ordering.js b/django/contrib/admin/static/admin/js/admin/ordering.js deleted file mode 100644 index 595be4d62b..0000000000 --- a/django/contrib/admin/static/admin/js/admin/ordering.js +++ /dev/null @@ -1,137 +0,0 @@ -addEvent(window, 'load', reorder_init); - -var lis; -var top = 0; -var left = 0; -var height = 30; - -function reorder_init() { - lis = document.getElementsBySelector('ul#orderthese li'); - var input = document.getElementsBySelector('input[name=order_]')[0]; - setOrder(input.value.split(',')); - input.disabled = true; - draw(); - // Now initialize the dragging behavior - var limit = (lis.length - 1) * height; - for (var i = 0; i < lis.length; i++) { - var li = lis[i]; - var img = document.getElementById('handle'+li.id); - li.style.zIndex = 1; - Drag.init(img, li, left + 10, left + 10, top + 10, top + 10 + limit); - li.onDragStart = startDrag; - li.onDragEnd = endDrag; - img.style.cursor = 'move'; - } -} - -function submitOrderForm() { - var inputOrder = document.getElementsBySelector('input[name=order_]')[0]; - inputOrder.value = getOrder(); - inputOrder.disabled=false; -} - -function startDrag() { - this.style.zIndex = '10'; - this.className = 'dragging'; -} - -function endDrag(x, y) { - this.style.zIndex = '1'; - this.className = ''; - // Work out how far along it has been dropped, using x co-ordinate - var oldIndex = this.index; - var newIndex = Math.round((y - 10 - top) / height); - // 'Snap' to the correct position - this.style.top = (10 + top + newIndex * height) + 'px'; - this.index = newIndex; - moveItem(oldIndex, newIndex); -} - -function moveItem(oldIndex, newIndex) { - // Swaps two items, adjusts the index and left co-ord for all others - if (oldIndex == newIndex) { - return; // Nothing to swap; - } - var direction, lo, hi; - if (newIndex > oldIndex) { - lo = oldIndex; - hi = newIndex; - direction = -1; - } else { - direction = 1; - hi = oldIndex; - lo 
= newIndex; - } - var lis2 = new Array(); // We will build the new order in this array - for (var i = 0; i < lis.length; i++) { - if (i < lo || i > hi) { - // Position of items not between the indexes is unaffected - lis2[i] = lis[i]; - continue; - } else if (i == newIndex) { - lis2[i] = lis[oldIndex]; - continue; - } else { - // Item is between the two indexes - move it along 1 - lis2[i] = lis[i - direction]; - } - } - // Re-index everything - reIndex(lis2); - lis = lis2; - draw(); -// document.getElementById('hiddenOrder').value = getOrder(); - document.getElementsBySelector('input[name=order_]')[0].value = getOrder(); -} - -function reIndex(lis) { - for (var i = 0; i < lis.length; i++) { - lis[i].index = i; - } -} - -function draw() { - for (var i = 0; i < lis.length; i++) { - var li = lis[i]; - li.index = i; - li.style.position = 'absolute'; - li.style.left = (10 + left) + 'px'; - li.style.top = (10 + top + (i * height)) + 'px'; - } -} - -function getOrder() { - var order = new Array(lis.length); - for (var i = 0; i < lis.length; i++) { - order[i] = lis[i].id.substring(1, 100); - } - return order.join(','); -} - -function setOrder(id_list) { - /* Set the current order to match the lsit of IDs */ - var temp_lis = new Array(); - for (var i = 0; i < id_list.length; i++) { - var id = 'p' + id_list[i]; - temp_lis[temp_lis.length] = document.getElementById(id); - } - reIndex(temp_lis); - lis = temp_lis; - draw(); -} - -function addEvent(elm, evType, fn, useCapture) -// addEvent and removeEvent -// cross-browser event handling for IE5+, NS6 and Mozilla -// By Scott Andrew -{ - if (elm.addEventListener){ - elm.addEventListener(evType, fn, useCapture); - return true; - } else if (elm.attachEvent){ - var r = elm.attachEvent("on"+evType, fn); - return r; - } else { - elm['on'+evType] = fn; - } -} diff --git a/django/contrib/admin/static/admin/js/getElementsBySelector.js b/django/contrib/admin/static/admin/js/getElementsBySelector.js deleted file mode 100644 index 15b57a1908..0000000000 --- a/django/contrib/admin/static/admin/js/getElementsBySelector.js +++ /dev/null @@ -1,167 +0,0 @@ -/* document.getElementsBySelector(selector) - - returns an array of element objects from the current document - matching the CSS selector. Selectors can contain element names, - class names and ids and can be nested. For example: - - elements = document.getElementsBySelect('div#main p a.external') - - Will return an array of all 'a' elements with 'external' in their - class attribute that are contained inside 'p' elements that are - contained inside the 'div' element which has id="main" - - New in version 0.4: Support for CSS2 and CSS3 attribute selectors: - See http://www.w3.org/TR/css3-selectors/#attribute-selectors - - Version 0.4 - Simon Willison, March 25th 2003 - -- Works in Phoenix 0.5, Mozilla 1.3, Opera 7, Internet Explorer 6, Internet Explorer 5 on Windows - -- Opera 7 fails -*/ - -function getAllChildren(e) { - // Returns all children of element. Workaround required for IE5/Windows. Ugh. - return e.all ? 
e.all : e.getElementsByTagName('*'); -} - -document.getElementsBySelector = function(selector) { - // Attempt to fail gracefully in lesser browsers - if (!document.getElementsByTagName) { - return new Array(); - } - // Split selector in to tokens - var tokens = selector.split(' '); - var currentContext = new Array(document); - for (var i = 0; i < tokens.length; i++) { - token = tokens[i].replace(/^\s+/,'').replace(/\s+$/,'');; - if (token.indexOf('#') > -1) { - // Token is an ID selector - var bits = token.split('#'); - var tagName = bits[0]; - var id = bits[1]; - var element = document.getElementById(id); - if (!element || (tagName && element.nodeName.toLowerCase() != tagName)) { - // ID not found or tag with that ID not found, return false. - return new Array(); - } - // Set currentContext to contain just this element - currentContext = new Array(element); - continue; // Skip to next token - } - if (token.indexOf('.') > -1) { - // Token contains a class selector - var bits = token.split('.'); - var tagName = bits[0]; - var className = bits[1]; - if (!tagName) { - tagName = '*'; - } - // Get elements matching tag, filter them for class selector - var found = new Array; - var foundCount = 0; - for (var h = 0; h < currentContext.length; h++) { - var elements; - if (tagName == '*') { - elements = getAllChildren(currentContext[h]); - } else { - try { - elements = currentContext[h].getElementsByTagName(tagName); - } - catch(e) { - elements = []; - } - } - for (var j = 0; j < elements.length; j++) { - found[foundCount++] = elements[j]; - } - } - currentContext = new Array; - var currentContextIndex = 0; - for (var k = 0; k < found.length; k++) { - if (found[k].className && found[k].className.match(new RegExp('\\b'+className+'\\b'))) { - currentContext[currentContextIndex++] = found[k]; - } - } - continue; // Skip to next token - } - // Code to deal with attribute selectors - if (token.match(/^(\w*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/)) { - var tagName = RegExp.$1; - var attrName = RegExp.$2; - var attrOperator = RegExp.$3; - var attrValue = RegExp.$4; - if (!tagName) { - tagName = '*'; - } - // Grab all of the tagName elements within current context - var found = new Array; - var foundCount = 0; - for (var h = 0; h < currentContext.length; h++) { - var elements; - if (tagName == '*') { - elements = getAllChildren(currentContext[h]); - } else { - elements = currentContext[h].getElementsByTagName(tagName); - } - for (var j = 0; j < elements.length; j++) { - found[foundCount++] = elements[j]; - } - } - currentContext = new Array; - var currentContextIndex = 0; - var checkFunction; // This function will be used to filter the elements - switch (attrOperator) { - case '=': // Equality - checkFunction = function(e) { return (e.getAttribute(attrName) == attrValue); }; - break; - case '~': // Match one of space seperated words - checkFunction = function(e) { return (e.getAttribute(attrName).match(new RegExp('\\b'+attrValue+'\\b'))); }; - break; - case '|': // Match start with value followed by optional hyphen - checkFunction = function(e) { return (e.getAttribute(attrName).match(new RegExp('^'+attrValue+'-?'))); }; - break; - case '^': // Match starts with value - checkFunction = function(e) { return (e.getAttribute(attrName).indexOf(attrValue) == 0); }; - break; - case '$': // Match ends with value - fails with "Warning" in Opera 7 - checkFunction = function(e) { return (e.getAttribute(attrName).lastIndexOf(attrValue) == e.getAttribute(attrName).length - attrValue.length); }; - break; - case '*': 
// Match ends with value - checkFunction = function(e) { return (e.getAttribute(attrName).indexOf(attrValue) > -1); }; - break; - default : - // Just test for existence of attribute - checkFunction = function(e) { return e.getAttribute(attrName); }; - } - currentContext = new Array; - var currentContextIndex = 0; - for (var k = 0; k < found.length; k++) { - if (checkFunction(found[k])) { - currentContext[currentContextIndex++] = found[k]; - } - } - // alert('Attribute Selector: '+tagName+' '+attrName+' '+attrOperator+' '+attrValue); - continue; // Skip to next token - } - // If we get here, token is JUST an element (not a class or ID selector) - tagName = token; - var found = new Array; - var foundCount = 0; - for (var h = 0; h < currentContext.length; h++) { - var elements = currentContext[h].getElementsByTagName(tagName); - for (var j = 0; j < elements.length; j++) { - found[foundCount++] = elements[j]; - } - } - currentContext = found; - } - return currentContext; -} - -/* That revolting regular expression explained -/^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/ - \---/ \---/\-------------/ \-------/ - | | | | - | | | The value - | | ~,|,^,$,* or = - | Attribute - Tag -*/ diff --git a/django/contrib/admin/templates/admin/auth/user/change_password.html b/django/contrib/admin/templates/admin/auth/user/change_password.html index b5a7715844..83a9c48ee1 100644 --- a/django/contrib/admin/templates/admin/auth/user/change_password.html +++ b/django/contrib/admin/templates/admin/auth/user/change_password.html @@ -1,5 +1,5 @@ {% extends "admin/base_site.html" %} -{% load i18n admin_static admin_modify %} +{% load i18n admin_static %} {% load admin_urls %} {% block extrahead %}{{ block.super }} @@ -13,7 +13,7 @@ {% trans 'Home' %}{{ opts.app_label|capfirst|escape }}{{ opts.verbose_name_plural|capfirst }} -› {{ original|truncatewords:"18" }} +› {{ original|truncatewords:"18" }} › {% trans 'Change password' %} {% endblock %} diff --git a/django/contrib/admin/templates/admin/change_form.html b/django/contrib/admin/templates/admin/change_form.html index 4962e732a2..48846960b3 100644 --- a/django/contrib/admin/templates/admin/change_form.html +++ b/django/contrib/admin/templates/admin/change_form.html @@ -9,7 +9,7 @@ {% block extrastyle %}{{ block.super }}{% endblock %} -{% block coltype %}{% if ordered_objects %}colMS{% else %}colM{% endif %}{% endblock %} +{% block coltype %}colM{% endblock %} {% block bodyclass %}{{ opts.app_label }}-{{ opts.object_name.lower }} change-form{% endblock %} diff --git a/django/contrib/admin/templates/admin/edit_inline/tabular.html b/django/contrib/admin/templates/admin/edit_inline/tabular.html index f2757ede48..71a62ba71a 100644 --- a/django/contrib/admin/templates/admin/edit_inline/tabular.html +++ b/django/contrib/admin/templates/admin/edit_inline/tabular.html @@ -46,7 +46,7 @@ {% for field in line %} {% if field.is_readonly %} -

<p>{{ field.contents }}</p>
+<p>{{ field.contents|linebreaksbr }}</p>
{% else %} {{ field.field.errors.as_ul }} {{ field.field }} diff --git a/django/contrib/admin/templates/admin/includes/fieldset.html b/django/contrib/admin/templates/admin/includes/fieldset.html index c8d08c880f..09bc971d2f 100644 --- a/django/contrib/admin/templates/admin/includes/fieldset.html +++ b/django/contrib/admin/templates/admin/includes/fieldset.html @@ -14,7 +14,7 @@ {% else %} {{ field.label_tag }} {% if field.is_readonly %} -

<p>{{ field.contents }}</p>
+<p>{{ field.contents|linebreaksbr }}</p>
{% else %} {{ field.field }} {% endif %} diff --git a/django/contrib/admin/templates/admin/index.html b/django/contrib/admin/templates/admin/index.html index 91ea0844b1..961e4823e0 100644 --- a/django/contrib/admin/templates/admin/index.html +++ b/django/contrib/admin/templates/admin/index.html @@ -14,7 +14,7 @@ {% if app_list %} {% for app in app_list %} -
+
{% for model in app.models %} - + {% if model.admin_url %} {% else %} diff --git a/django/contrib/admin/templates/admin/submit_line.html b/django/contrib/admin/templates/admin/submit_line.html index 8c9d22752d..38a97a1c6a 100644 --- a/django/contrib/admin/templates/admin/submit_line.html +++ b/django/contrib/admin/templates/admin/submit_line.html @@ -1,8 +1,8 @@ {% load i18n admin_urls %}
-{% if show_save %}{% endif %} +{% if show_save %}{% endif %} {% if show_delete_link %}{% endif %} -{% if show_save_as_new %}{%endif%} -{% if show_save_and_add_another %}{% endif %} -{% if show_save_and_continue %}{% endif %} +{% if show_save_as_new %}{%endif%} +{% if show_save_and_add_another %}{% endif %} +{% if show_save_and_continue %}{% endif %}
diff --git a/django/contrib/admin/templatetags/admin_modify.py b/django/contrib/admin/templatetags/admin_modify.py index f6ac59635a..cecc6ed6c4 100644 --- a/django/contrib/admin/templatetags/admin_modify.py +++ b/django/contrib/admin/templatetags/admin_modify.py @@ -30,8 +30,6 @@ def submit_row(context): save_as = context['save_as'] ctx = { 'opts': opts, - 'onclick_attrib': (opts.get_ordered_objects() and change - and 'onclick="submitOrderForm();"' or ''), 'show_delete_link': (not is_popup and context['has_delete_permission'] and change and context.get('show_delete', True)), 'show_save_as_new': not is_popup and change and save_as, diff --git a/django/contrib/admin/templatetags/adminmedia.py b/django/contrib/admin/templatetags/adminmedia.py deleted file mode 100644 index b08d13c18f..0000000000 --- a/django/contrib/admin/templatetags/adminmedia.py +++ /dev/null @@ -1,15 +0,0 @@ -import warnings -from django.template import Library -from django.templatetags.static import PrefixNode - -register = Library() - -@register.simple_tag -def admin_media_prefix(): - """ - Returns the string contained in the setting ADMIN_MEDIA_PREFIX. - """ - warnings.warn( - "The admin_media_prefix template tag is deprecated. " - "Use the static template tag instead.", DeprecationWarning) - return PrefixNode.handle_simple("ADMIN_MEDIA_PREFIX") diff --git a/django/contrib/admin/widgets.py b/django/contrib/admin/widgets.py index 1e0bc2d366..1e6277fb87 100644 --- a/django/contrib/admin/widgets.py +++ b/django/contrib/admin/widgets.py @@ -10,7 +10,7 @@ from django.contrib.admin.templatetags.admin_static import static from django.core.urlresolvers import reverse from django.forms.widgets import RadioFieldRenderer from django.forms.util import flatatt -from django.utils.html import escape, format_html, format_html_join +from django.utils.html import escape, format_html, format_html_join, smart_urlquote from django.utils.text import Truncator from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe @@ -306,6 +306,19 @@ class AdminURLFieldWidget(forms.TextInput): final_attrs.update(attrs) super(AdminURLFieldWidget, self).__init__(attrs=final_attrs) + def render(self, name, value, attrs=None): + html = super(AdminURLFieldWidget, self).render(name, value, attrs) + if value: + value = force_text(self._format_value(value)) + final_attrs = {'href': mark_safe(smart_urlquote(value))} + html = format_html( + '

<p class="url">{0} <a{1}>{2}</a><br />{3} {4}</p>
', + _('Currently:'), flatatt(final_attrs), value, + _('Change:'), html + ) + return html + + class AdminIntegerFieldWidget(forms.TextInput): class_name = 'vIntegerField' diff --git a/django/contrib/admindocs/views.py b/django/contrib/admindocs/views.py index 94963b4d39..cb0c116416 100644 --- a/django/contrib/admindocs/views.py +++ b/django/contrib/admindocs/views.py @@ -14,6 +14,7 @@ from django.core import urlresolvers from django.contrib.admindocs import utils from django.contrib.sites.models import Site from django.utils.importlib import import_module +from django.utils._os import upath from django.utils import six from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe @@ -311,7 +312,7 @@ def load_all_installed_template_libraries(): try: libraries = [ os.path.splitext(p)[0] - for p in os.listdir(os.path.dirname(mod.__file__)) + for p in os.listdir(os.path.dirname(upath(mod.__file__))) if p.endswith('.py') and p[0].isalpha() ] except OSError: diff --git a/django/contrib/auth/__init__.py b/django/contrib/auth/__init__.py index dd4a8484f5..99348d3ae5 100644 --- a/django/contrib/auth/__init__.py +++ b/django/contrib/auth/__init__.py @@ -1,6 +1,6 @@ import re -from django.core.exceptions import ImproperlyConfigured +from django.core.exceptions import ImproperlyConfigured, PermissionDenied from django.utils.importlib import import_module from django.contrib.auth.signals import user_logged_in, user_logged_out, user_login_failed @@ -60,6 +60,9 @@ def authenticate(**credentials): except TypeError: # This backend doesn't accept these credentials as arguments. Try the next one. continue + except PermissionDenied: + # This backend says to stop in our tracks - this user should not be allowed in at all. + return None if user is None: continue # Annotate the user object with the path of the backend. @@ -81,14 +84,14 @@ def login(request, user): user = request.user # TODO: It would be nice to support different login methods, like signed cookies. if SESSION_KEY in request.session: - if request.session[SESSION_KEY] != user.id: + if request.session[SESSION_KEY] != user.pk: # To avoid reusing another user's session, create a new, empty # session if the existing session corresponds to a different # authenticated user. 
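
A minimal sketch (illustrative, not part of this diff) of a backend that uses the new PermissionDenied handling in authenticate(): raising the exception makes authenticate() return None immediately, without consulting the remaining AUTHENTICATION_BACKENDS. The class name and ban list are made up.

    from django.core.exceptions import PermissionDenied

    class BanBackend(object):
        """Hypothetical backend that rejects known-bad usernames up front."""

        def authenticate(self, username=None, password=None):
            if username in ('banned', 'spammer'):  # illustrative ban list
                # django.contrib.auth.authenticate() catches this and stops,
                # so no later backend gets a chance to let the user in.
                raise PermissionDenied
            return None  # no opinion; fall through to the next backend
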
request.session.flush() else: request.session.cycle_key() - request.session[SESSION_KEY] = user.id + request.session[SESSION_KEY] = user.pk request.session[BACKEND_SESSION_KEY] = user.backend if hasattr(request, 'user'): request.user = user diff --git a/django/contrib/auth/admin.py b/django/contrib/auth/admin.py index 5f476f91c2..d15a387a7e 100644 --- a/django/contrib/auth/admin.py +++ b/django/contrib/auth/admin.py @@ -133,7 +133,7 @@ class UserAdmin(admin.ModelAdmin): adminForm = admin.helpers.AdminForm(form, fieldsets, {}) context = { - 'title': _('Change password: %s') % escape(user.username), + 'title': _('Change password: %s') % escape(user.get_username()), 'adminForm': adminForm, 'form_url': form_url, 'form': form, @@ -148,10 +148,10 @@ class UserAdmin(admin.ModelAdmin): 'save_as': False, 'show_save': True, } - return TemplateResponse(request, [ + return TemplateResponse(request, self.change_user_password_template or - 'admin/auth/user/change_password.html' - ], context, current_app=self.admin_site.name) + 'admin/auth/user/change_password.html', + context, current_app=self.admin_site.name) def response_add(self, request, obj, **kwargs): """ diff --git a/django/contrib/auth/context_processors.py b/django/contrib/auth/context_processors.py index 5929505359..3d17fe2754 100644 --- a/django/contrib/auth/context_processors.py +++ b/django/contrib/auth/context_processors.py @@ -18,7 +18,9 @@ class PermLookupDict(object): def __bool__(self): return self.user.has_module_perms(self.module_name) - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) class PermWrapper(object): diff --git a/django/contrib/auth/forms.py b/django/contrib/auth/forms.py index 423e3429e6..4e2f476cec 100644 --- a/django/contrib/auth/forms.py +++ b/django/contrib/auth/forms.py @@ -27,7 +27,7 @@ class ReadOnlyPasswordHashWidget(forms.Widget): encoded = value final_attrs = self.build_attrs(attrs) - if encoded == '' or encoded == UNUSABLE_PASSWORD: + if not encoded or encoded == UNUSABLE_PASSWORD: summary = mark_safe("%s" % ugettext("No password set.")) else: try: @@ -52,6 +52,11 @@ class ReadOnlyPasswordHashField(forms.Field): kwargs.setdefault("required", False) super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs) + def bound_data(self, data, initial): + # Always return initial because the widget doesn't + # render an input field. + return initial + class UserCreationForm(forms.ModelForm): """ @@ -143,8 +148,8 @@ class AuthenticationForm(forms.Form): password = forms.CharField(label=_("Password"), widget=forms.PasswordInput) error_messages = { - 'invalid_login': _("Please enter a correct username and password. " - "Note that both fields are case-sensitive."), + 'invalid_login': _("Please enter a correct %(username)s and password. " + "Note that both fields may be case-sensitive."), 'no_cookies': _("Your Web browser doesn't appear to have cookies " "enabled. Cookies are required for logging in."), 'inactive': _("This account is inactive."), @@ -163,8 +168,8 @@ class AuthenticationForm(forms.Form): # Set the label for the "username" field. 
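
Since AuthenticationForm's invalid_login message is now interpolated with the username field's verbose_name, a project that overrides the message should keep the %(username)s placeholder. A rough sketch, not from this diff; subclass name and wording are illustrative:

    from django.contrib.auth.forms import AuthenticationForm
    from django.utils.translation import ugettext_lazy as _

    class StrictLoginForm(AuthenticationForm):
        # %(username)s is filled in from USERNAME_FIELD's verbose_name, so the
        # same message also reads correctly for email-based custom user models.
        error_messages = dict(
            AuthenticationForm.error_messages,
            invalid_login=_("Wrong %(username)s or password."),
        )
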
UserModel = get_user_model() - username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD) - self.fields['username'].label = capfirst(username_field.verbose_name) + self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD) + self.fields['username'].label = capfirst(self.username_field.verbose_name) def clean(self): username = self.cleaned_data.get('username') @@ -175,7 +180,9 @@ class AuthenticationForm(forms.Form): password=password) if self.user_cache is None: raise forms.ValidationError( - self.error_messages['invalid_login']) + self.error_messages['invalid_login'] % { + 'username': self.username_field.verbose_name + }) elif not self.user_cache.is_active: raise forms.ValidationError(self.error_messages['inactive']) self.check_for_test_cookie() @@ -209,10 +216,12 @@ class PasswordResetForm(forms.Form): """ UserModel = get_user_model() email = self.cleaned_data["email"] - self.users_cache = UserModel.objects.filter(email__iexact=email, - is_active=True) + self.users_cache = UserModel.objects.filter(email__iexact=email) if not len(self.users_cache): raise forms.ValidationError(self.error_messages['unknown']) + if not any(user.is_active for user in self.users_cache): + # none of the filtered users are active + raise forms.ValidationError(self.error_messages['unknown']) if any((user.password == UNUSABLE_PASSWORD) for user in self.users_cache): raise forms.ValidationError(self.error_messages['unusable']) @@ -239,7 +248,7 @@ class PasswordResetForm(forms.Form): 'email': user.email, 'domain': domain, 'site_name': site_name, - 'uid': int_to_base36(user.id), + 'uid': int_to_base36(user.pk), 'user': user, 'token': token_generator.make_token(user), 'protocol': use_https and 'https' or 'http', diff --git a/django/contrib/auth/handlers/modwsgi.py b/django/contrib/auth/handlers/modwsgi.py index 3229c6714b..5ee4d609f7 100644 --- a/django/contrib/auth/handlers/modwsgi.py +++ b/django/contrib/auth/handlers/modwsgi.py @@ -21,17 +21,12 @@ def check_password(environ, username, password): user = UserModel.objects.get_by_natural_key(username) except UserModel.DoesNotExist: return None - try: - if not user.is_active: - return None - except AttributeError as e: - # a custom user may not support is_active + if not user.is_active: return None return user.check_password(password) finally: db.close_connection() - def groups_for_user(environ, username): """ Authorizes a user based on groups diff --git a/django/contrib/auth/management/__init__.py b/django/contrib/auth/management/__init__.py index b5fd29a1c2..ce5d57fa79 100644 --- a/django/contrib/auth/management/__init__.py +++ b/django/contrib/auth/management/__init__.py @@ -10,6 +10,7 @@ import unicodedata from django.contrib.auth import models as auth_app, get_user_model from django.core import exceptions from django.core.management.base import CommandError +from django.db import DEFAULT_DB_ALIAS, router from django.db.models import get_models, signals from django.utils import six from django.utils.six.moves import input @@ -57,7 +58,10 @@ def _check_permission_clashing(custom, builtin, ctype): (codename, ctype.app_label, ctype.model_class().__name__)) pool.add(codename) -def create_permissions(app, created_models, verbosity, **kwargs): +def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kwargs): + if not router.allow_syncdb(db, auth_app.Permission): + return + from django.contrib.contenttypes.models import ContentType app_models = get_models(app) @@ -68,7 +72,9 @@ def create_permissions(app, created_models, 
verbosity, **kwargs): # The codenames and ctypes that should exist. ctypes = set() for klass in app_models: - ctype = ContentType.objects.get_for_model(klass) + # Force looking up the content types in the current database + # before creating foreign keys to them. + ctype = ContentType.objects.db_manager(db).get_for_model(klass) ctypes.add(ctype) for perm in _get_all_permissions(klass._meta, ctype): searched_perms.append((ctype, perm)) @@ -76,21 +82,21 @@ def create_permissions(app, created_models, verbosity, **kwargs): # Find all the Permissions that have a context_type for a model we're # looking for. We don't need to check for codenames since we already have # a list of the ones we're going to create. - all_perms = set(auth_app.Permission.objects.filter( + all_perms = set(auth_app.Permission.objects.using(db).filter( content_type__in=ctypes, ).values_list( "content_type", "codename" )) - objs = [ + perms = [ auth_app.Permission(codename=codename, name=name, content_type=ctype) for ctype, (codename, name) in searched_perms if (ctype.pk, codename) not in all_perms ] - auth_app.Permission.objects.bulk_create(objs) + auth_app.Permission.objects.using(db).bulk_create(perms) if verbosity >= 2: - for obj in objs: - print("Adding permission '%s'" % obj) + for perm in perms: + print("Adding permission '%s'" % perm) def create_superuser(app, created_models, verbosity, db, **kwargs): diff --git a/django/contrib/auth/middleware.py b/django/contrib/auth/middleware.py index 0398cfaf1e..f38efdd1d2 100644 --- a/django/contrib/auth/middleware.py +++ b/django/contrib/auth/middleware.py @@ -1,4 +1,6 @@ from django.contrib import auth +from django.contrib.auth import load_backend +from django.contrib.auth.backends import RemoteUserBackend from django.core.exceptions import ImproperlyConfigured from django.utils.functional import SimpleLazyObject @@ -47,9 +49,18 @@ class RemoteUserMiddleware(object): try: username = request.META[self.header] except KeyError: - # If specified header doesn't exist then return (leaving - # request.user set to AnonymousUser by the - # AuthenticationMiddleware). + # If specified header doesn't exist then remove any existing + # authenticated remote-user, or return (leaving request.user set to + # AnonymousUser by the AuthenticationMiddleware). + if request.user.is_authenticated(): + try: + stored_backend = load_backend(request.session.get( + auth.BACKEND_SESSION_KEY, '')) + if isinstance(stored_backend, RemoteUserBackend): + auth.logout(request) + except ImproperlyConfigured as e: + # backend failed to load + auth.logout(request) return # If the user is already authenticated and that user is the user we are # getting passed in the headers, then the correct user is already diff --git a/django/contrib/auth/models.py b/django/contrib/auth/models.py index bd7bf4a162..7d62810923 100644 --- a/django/contrib/auth/models.py +++ b/django/contrib/auth/models.py @@ -195,43 +195,13 @@ class UserManager(BaseUserManager): return u -# A few helper functions for common logic between User and AnonymousUser. 
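
create_permissions() now skips databases the router refuses for syncdb and reads/writes through the given alias. A sketch of a router that the new allow_syncdb() check would consult; the router class and the 'reporting' alias are made up for illustration:

    class KeepAuthOffReportingRouter(object):
        """Hypothetical router: keep contrib.auth tables (including
        Permission) out of a secondary 'reporting' database."""

        def allow_syncdb(self, db, model):
            if db == 'reporting' and model._meta.app_label == 'auth':
                return False  # create_permissions() returns early for this alias
            return None  # no opinion; defer to other routers / the default
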
-def _user_get_all_permissions(user, obj): - permissions = set() - for backend in auth.get_backends(): - if hasattr(backend, "get_all_permissions"): - if obj is not None: - permissions.update(backend.get_all_permissions(user, obj)) - else: - permissions.update(backend.get_all_permissions(user)) - return permissions - - -def _user_has_perm(user, perm, obj): - for backend in auth.get_backends(): - if hasattr(backend, "has_perm"): - if obj is not None: - if backend.has_perm(user, perm, obj): - return True - else: - if backend.has_perm(user, perm): - return True - return False - - -def _user_has_module_perms(user, app_label): - for backend in auth.get_backends(): - if hasattr(backend, "has_module_perms"): - if backend.has_module_perms(user, app_label): - return True - return False - - @python_2_unicode_compatible class AbstractBaseUser(models.Model): password = models.CharField(_('password'), max_length=128) last_login = models.DateTimeField(_('last login'), default=timezone.now) + is_active = True + REQUIRED_FIELDS = [] class Meta: @@ -288,32 +258,46 @@ class AbstractBaseUser(models.Model): raise NotImplementedError() -class AbstractUser(AbstractBaseUser): - """ - An abstract base class implementing a fully featured User model with - admin-compliant permissions. +# A few helper functions for common logic between User and AnonymousUser. +def _user_get_all_permissions(user, obj): + permissions = set() + for backend in auth.get_backends(): + if hasattr(backend, "get_all_permissions"): + if obj is not None: + permissions.update(backend.get_all_permissions(user, obj)) + else: + permissions.update(backend.get_all_permissions(user)) + return permissions - Username, password and email are required. Other fields are optional. + +def _user_has_perm(user, perm, obj): + for backend in auth.get_backends(): + if hasattr(backend, "has_perm"): + if obj is not None: + if backend.has_perm(user, perm, obj): + return True + else: + if backend.has_perm(user, perm): + return True + return False + + +def _user_has_module_perms(user, app_label): + for backend in auth.get_backends(): + if hasattr(backend, "has_module_perms"): + if backend.has_module_perms(user, app_label): + return True + return False + + +class PermissionsMixin(models.Model): + """ + A mixin class that adds the fields and methods necessary to support + Django's Group and Permission model using the ModelBackend. """ - username = models.CharField(_('username'), max_length=30, unique=True, - help_text=_('Required. 30 characters or fewer. Letters, numbers and ' - '@/./+/-/_ characters'), - validators=[ - validators.RegexValidator(re.compile('^[\w.@+-]+$'), _('Enter a valid username.'), 'invalid') - ]) - first_name = models.CharField(_('first name'), max_length=30, blank=True) - last_name = models.CharField(_('last name'), max_length=30, blank=True) - email = models.EmailField(_('email address'), blank=True) - is_staff = models.BooleanField(_('staff status'), default=False, - help_text=_('Designates whether the user can log into this admin ' - 'site.')) - is_active = models.BooleanField(_('active'), default=True, - help_text=_('Designates whether this user should be treated as ' - 'active. 
Unselect this instead of deleting accounts.')) is_superuser = models.BooleanField(_('superuser status'), default=False, help_text=_('Designates that this user has all permissions without ' 'explicitly assigning them.')) - date_joined = models.DateTimeField(_('date joined'), default=timezone.now) groups = models.ManyToManyField(Group, verbose_name=_('groups'), blank=True, help_text=_('The groups this user belongs to. A user will ' 'get all permissions granted to each of ' @@ -322,30 +306,9 @@ class AbstractUser(AbstractBaseUser): verbose_name=_('user permissions'), blank=True, help_text='Specific permissions for this user.') - objects = UserManager() - - USERNAME_FIELD = 'username' - REQUIRED_FIELDS = ['email'] - class Meta: - verbose_name = _('user') - verbose_name_plural = _('users') abstract = True - def get_absolute_url(self): - return "/users/%s/" % urlquote(self.username) - - def get_full_name(self): - """ - Returns the first_name plus the last_name, with a space in between. - """ - full_name = '%s %s' % (self.first_name, self.last_name) - return full_name.strip() - - def get_short_name(self): - "Returns the short name for the user." - return self.first_name - def get_group_permissions(self, obj=None): """ Returns a list of permission strings that this user has through his/her @@ -403,6 +366,55 @@ class AbstractUser(AbstractBaseUser): return _user_has_module_perms(self, app_label) + +class AbstractUser(AbstractBaseUser, PermissionsMixin): + """ + An abstract base class implementing a fully featured User model with + admin-compliant permissions. + + Username, password and email are required. Other fields are optional. + """ + username = models.CharField(_('username'), max_length=30, unique=True, + help_text=_('Required. 30 characters or fewer. Letters, numbers and ' + '@/./+/-/_ characters'), + validators=[ + validators.RegexValidator(re.compile('^[\w.@+-]+$'), _('Enter a valid username.'), 'invalid') + ]) + first_name = models.CharField(_('first name'), max_length=30, blank=True) + last_name = models.CharField(_('last name'), max_length=30, blank=True) + email = models.EmailField(_('email address'), blank=True) + is_staff = models.BooleanField(_('staff status'), default=False, + help_text=_('Designates whether the user can log into this admin ' + 'site.')) + is_active = models.BooleanField(_('active'), default=True, + help_text=_('Designates whether this user should be treated as ' + 'active. Unselect this instead of deleting accounts.')) + date_joined = models.DateTimeField(_('date joined'), default=timezone.now) + + objects = UserManager() + + USERNAME_FIELD = 'username' + REQUIRED_FIELDS = ['email'] + + class Meta: + verbose_name = _('user') + verbose_name_plural = _('users') + abstract = True + + def get_absolute_url(self): + return "/users/%s/" % urlquote(self.username) + + def get_full_name(self): + """ + Returns the first_name plus the last_name, with a space in between. + """ + full_name = '%s %s' % (self.first_name, self.last_name) + return full_name.strip() + + def get_short_name(self): + "Returns the short name for the user." + return self.first_name + def email_user(self, subject, message, from_email=None): """ Sends an email to this User. 
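
With the refactor above, the group/permission machinery lives in PermissionsMixin, so a custom user model can keep Django's permission handling without AbstractUser's username/email/staff fields. A minimal sketch along the lines of the CustomPermissionsUser test model; the Member model and its fields are illustrative:

    from django.db import models
    from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager,
        PermissionsMixin)

    class Member(AbstractBaseUser, PermissionsMixin):
        # PermissionsMixin contributes is_superuser, groups, user_permissions
        # and the has_perm()/has_module_perms() helpers used by ModelBackend.
        email = models.EmailField(unique=True)

        objects = BaseUserManager()

        USERNAME_FIELD = 'email'

        def get_full_name(self):
            return self.email

        def get_short_name(self):
            return self.email
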
diff --git a/django/contrib/auth/tests/__init__.py b/django/contrib/auth/tests/__init__.py index b3007ea484..038c8980d9 100644 --- a/django/contrib/auth/tests/__init__.py +++ b/django/contrib/auth/tests/__init__.py @@ -14,3 +14,16 @@ from django.contrib.auth.tests.tokens import * from django.contrib.auth.tests.views import * # The password for the fixture data users is 'password' + +from django.dispatch import receiver +from django.test.signals import setting_changed + + +@receiver(setting_changed) +def user_model_swapped(**kwargs): + if kwargs['setting'] == 'AUTH_USER_MODEL': + from django.db.models.manager import ensure_default_manager + from django.contrib.auth.models import User + # Reset User manager + setattr(User, 'objects', User._default_manager) + ensure_default_manager(User) diff --git a/django/contrib/auth/tests/auth_backends.py b/django/contrib/auth/tests/auth_backends.py index e92f159ff9..71f18d32cf 100644 --- a/django/contrib/auth/tests/auth_backends.py +++ b/django/contrib/auth/tests/auth_backends.py @@ -4,9 +4,10 @@ from datetime import date from django.conf import settings from django.contrib.auth.models import User, Group, Permission, AnonymousUser from django.contrib.auth.tests.utils import skipIfCustomUser -from django.contrib.auth.tests.custom_user import ExtensionUser +from django.contrib.auth.tests.custom_user import ExtensionUser, CustomPermissionsUser from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import ImproperlyConfigured +from django.core.exceptions import ImproperlyConfigured, PermissionDenied +from django.contrib.auth import authenticate from django.test import TestCase from django.test.utils import override_settings @@ -33,7 +34,7 @@ class BaseModelBackendTest(object): ContentType.objects.clear_cache() def test_has_perm(self): - user = self.UserModel.objects.get(username='test') + user = self.UserModel.objects.get(pk=self.user.pk) self.assertEqual(user.has_perm('auth.test'), False) user.is_staff = True user.save() @@ -52,14 +53,14 @@ class BaseModelBackendTest(object): self.assertEqual(user.has_perm('auth.test'), False) def test_custom_perms(self): - user = self.UserModel.objects.get(username='test') + user = self.UserModel.objects.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) user.save() # reloading user to purge the _perm_cache - user = self.UserModel.objects.get(username='test') + user = self.UserModel.objects.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions() == set(['auth.test']), True) self.assertEqual(user.get_group_permissions(), set([])) self.assertEqual(user.has_module_perms('Group'), False) @@ -70,7 +71,7 @@ class BaseModelBackendTest(object): perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3') user.user_permissions.add(perm) user.save() - user = self.UserModel.objects.get(username='test') + user = self.UserModel.objects.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions(), set(['auth.test2', 'auth.test', 'auth.test3'])) self.assertEqual(user.has_perm('test'), False) self.assertEqual(user.has_perm('auth.test'), True) @@ -80,7 +81,7 @@ class BaseModelBackendTest(object): group.permissions.add(perm) group.save() user.groups.add(group) - user = self.UserModel.objects.get(username='test') + user = self.UserModel.objects.get(pk=self.user.pk) exp = set(['auth.test2', 'auth.test', 'auth.test3', 
'auth.test_group']) self.assertEqual(user.get_all_permissions(), exp) self.assertEqual(user.get_group_permissions(), set(['auth.test_group'])) @@ -92,7 +93,7 @@ class BaseModelBackendTest(object): def test_has_no_object_perm(self): """Regressiontest for #12462""" - user = self.UserModel.objects.get(username='test') + user = self.UserModel.objects.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) @@ -105,7 +106,7 @@ class BaseModelBackendTest(object): def test_get_all_superuser_permissions(self): "A superuser has all permissions. Refs #14795" - user = self.UserModel.objects.get(username='test2') + user = self.UserModel.objects.get(pk=self.superuser.pk) self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all())) @@ -117,12 +118,12 @@ class ModelBackendTest(BaseModelBackendTest, TestCase): UserModel = User def create_users(self): - User.objects.create_user( + self.user = User.objects.create_user( username='test', email='test@example.com', password='test', ) - User.objects.create_superuser( + self.superuser = User.objects.create_superuser( username='test2', email='test2@example.com', password='test', @@ -150,13 +151,13 @@ class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase): UserModel = ExtensionUser def create_users(self): - ExtensionUser.objects.create_user( + self.user = ExtensionUser.objects.create_user( username='test', email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) - ExtensionUser.objects.create_superuser( + self.superuser = ExtensionUser.objects.create_superuser( username='test2', email='test2@example.com', password='test', @@ -164,6 +165,31 @@ class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase): ) +@override_settings(AUTH_USER_MODEL='auth.CustomPermissionsUser') +class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase): + """ + Tests for the ModelBackend using the CustomPermissionsUser model. + + As with the ExtensionUser test, this isn't a perfect test, because both + the User and CustomPermissionsUser are synchronized to the database, + which wouldn't ordinary happen in production. + """ + + UserModel = CustomPermissionsUser + + def create_users(self): + self.user = CustomPermissionsUser.objects.create_user( + email='test@example.com', + password='test', + date_of_birth=date(2006, 4, 25) + ) + self.superuser = CustomPermissionsUser.objects.create_superuser( + email='test2@example.com', + password='test', + date_of_birth=date(1976, 11, 8) + ) + + class TestObj(object): pass @@ -323,3 +349,38 @@ class InActiveUserBackendTest(TestCase): def test_has_module_perms(self): self.assertEqual(self.user1.has_module_perms("app1"), False) self.assertEqual(self.user1.has_module_perms("app2"), False) + + +class PermissionDeniedBackend(object): + """ + Always raises PermissionDenied. 
+ """ + supports_object_permissions = True + supports_anonymous_user = True + supports_inactive_user = True + + def authenticate(self, username=None, password=None): + raise PermissionDenied + + +@skipIfCustomUser +class PermissionDeniedBackendTest(TestCase): + """ + Tests that other backends are not checked once a backend raises PermissionDenied + """ + backend = 'django.contrib.auth.tests.auth_backends.PermissionDeniedBackend' + + def setUp(self): + self.user1 = User.objects.create_user('test', 'test@example.com', 'test') + self.user1.save() + + @override_settings(AUTHENTICATION_BACKENDS=(backend, ) + + tuple(settings.AUTHENTICATION_BACKENDS)) + def test_permission_denied(self): + "user is not authenticated after a backend raises permission denied #2550" + self.assertEqual(authenticate(username='test', password='test'), None) + + @override_settings(AUTHENTICATION_BACKENDS=tuple( + settings.AUTHENTICATION_BACKENDS) + (backend, )) + def test_authenticates(self): + self.assertEqual(authenticate(username='test', password='test'), self.user1) diff --git a/django/contrib/auth/tests/basic.py b/django/contrib/auth/tests/basic.py index bc7344f753..2c807cca51 100644 --- a/django/contrib/auth/tests/basic.py +++ b/django/contrib/auth/tests/basic.py @@ -162,6 +162,8 @@ class BasicTestCase(TestCase): def test_swappable_user(self): "The current user model can be swapped out for another" self.assertEqual(get_user_model(), CustomUser) + with self.assertRaises(AttributeError): + User.objects.all() @override_settings(AUTH_USER_MODEL='badsetting') def test_swappable_user_bad_setting(self): diff --git a/django/contrib/auth/tests/context_processors.py b/django/contrib/auth/tests/context_processors.py index 32fea8ac80..f846a828dd 100644 --- a/django/contrib/auth/tests/context_processors.py +++ b/django/contrib/auth/tests/context_processors.py @@ -9,6 +9,7 @@ from django.contrib.auth.context_processors import PermWrapper, PermLookupDict from django.db.models import Q from django.test import TestCase from django.test.utils import override_settings +from django.utils._os import upath class MockUser(object): @@ -63,7 +64,7 @@ class PermWrapperTests(TestCase): @skipIfCustomUser @override_settings( TEMPLATE_DIRS=( - os.path.join(os.path.dirname(__file__), 'templates'), + os.path.join(os.path.dirname(upath(__file__)), 'templates'), ), USE_TZ=False, # required for loading the fixture PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',), diff --git a/django/contrib/auth/tests/custom_user.py b/django/contrib/auth/tests/custom_user.py index a29ed6a104..7e042e4895 100644 --- a/django/contrib/auth/tests/custom_user.py +++ b/django/contrib/auth/tests/custom_user.py @@ -1,5 +1,11 @@ from django.db import models -from django.contrib.auth.models import BaseUserManager, AbstractBaseUser, AbstractUser, UserManager +from django.contrib.auth.models import ( + BaseUserManager, + AbstractBaseUser, + AbstractUser, + UserManager, + PermissionsMixin +) # The custom User uses email as the unique identifier, and requires @@ -88,3 +94,53 @@ class ExtensionUser(AbstractUser): class Meta: app_label = 'auth' + + +# The CustomPermissionsUser users email as the identifier, but uses the normal +# Django permissions model. This allows us to check that the PermissionsMixin +# includes everything that is needed to interact with the ModelBackend. 
+ +class CustomPermissionsUserManager(CustomUserManager): + def create_superuser(self, email, password, date_of_birth): + u = self.create_user(email, password=password, date_of_birth=date_of_birth) + u.is_superuser = True + u.save(using=self._db) + return u + + +class CustomPermissionsUser(AbstractBaseUser, PermissionsMixin): + email = models.EmailField(verbose_name='email address', max_length=255, unique=True) + date_of_birth = models.DateField() + + objects = CustomPermissionsUserManager() + + USERNAME_FIELD = 'email' + REQUIRED_FIELDS = ['date_of_birth'] + + class Meta: + app_label = 'auth' + + def get_full_name(self): + return self.email + + def get_short_name(self): + return self.email + + def __unicode__(self): + return self.email + + +class IsActiveTestUser1(AbstractBaseUser): + """ + This test user class and derivatives test the default is_active behavior + """ + username = models.CharField(max_length=30, unique=True) + + objects = BaseUserManager() + + USERNAME_FIELD = 'username' + + class Meta: + app_label = 'auth' + + # the is_active attr is provided by AbstractBaseUser diff --git a/django/contrib/auth/tests/forms.py b/django/contrib/auth/tests/forms.py index f3eb24287e..a9f894905a 100644 --- a/django/contrib/auth/tests/forms.py +++ b/django/contrib/auth/tests/forms.py @@ -3,13 +3,15 @@ from __future__ import unicode_literals import os from django.contrib.auth.models import User from django.contrib.auth.forms import (UserCreationForm, AuthenticationForm, - PasswordChangeForm, SetPasswordForm, UserChangeForm, PasswordResetForm) + PasswordChangeForm, SetPasswordForm, UserChangeForm, PasswordResetForm, + ReadOnlyPasswordHashWidget) from django.contrib.auth.tests.utils import skipIfCustomUser from django.core import mail from django.forms.fields import Field, EmailField from django.test import TestCase from django.test.utils import override_settings from django.utils.encoding import force_text +from django.utils._os import upath from django.utils import translation from django.utils.translation import ugettext as _ @@ -98,7 +100,9 @@ class AuthenticationFormTest(TestCase): form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), - [force_text(form.error_messages['invalid_login'])]) + [force_text(form.error_messages['invalid_login'] % { + 'username': User._meta.get_field('username').verbose_name + })]) def test_inactive_user(self): # The user is inactive. 
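
AbstractBaseUser now provides a class-level is_active = True, which IsActiveTestUser1 above relies on. A tiny sketch of the resulting behaviour, mirroring the IsActiveTestCase added further down; assigning to the attribute only affects the in-memory instance because there is no column:

    user = IsActiveTestUser1(username='foo')
    assert user.is_active is True        # class-level default

    user.is_active = False               # shadows the class attribute only
    user.save()                          # nothing about is_active is persisted
    reloaded = IsActiveTestUser1.objects.get(pk=user.pk)
    assert reloaded.is_active is True    # back to the default on reload
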
@@ -282,6 +286,14 @@ class UserChangeFormTest(TestCase): self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['password'], 'sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161') + def test_bug_19349_bound_password_field(self): + user = User.objects.get(username='testclient') + form = UserChangeForm(data={}, instance=user) + # When rendering the bound password field, + # ReadOnlyPasswordHashWidget needs the initial + # value to render correctly + self.assertEqual(form.initial['password'], form['password'].value()) + @skipIfCustomUser @override_settings(USE_TZ=False, PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',)) @@ -322,7 +334,7 @@ class PasswordResetFormTest(TestCase): self.assertEqual(form.cleaned_data['email'], email) def test_custom_email_subject(self): - template_path = os.path.join(os.path.dirname(__file__), 'templates') + template_path = os.path.join(os.path.dirname(upath(__file__)), 'templates') with self.settings(TEMPLATE_DIRS=(template_path,)): data = {'email': 'testclient@example.com'} form = PasswordResetForm(data) @@ -362,3 +374,13 @@ class PasswordResetFormTest(TestCase): self.assertFalse(form.is_valid()) self.assertEqual(form["email"].errors, [_("The user account associated with this email address cannot reset the password.")]) + + +class ReadOnlyPasswordHashWidgetTest(TestCase): + + def test_bug_19349_render_with_none_value(self): + # Rendering the widget with value set to None + # mustn't raise an exception. + widget = ReadOnlyPasswordHashWidget() + html = widget.render(name='password', value=None, attrs={}) + self.assertIn(_("No password set."), html) diff --git a/django/contrib/auth/tests/handlers.py b/django/contrib/auth/tests/handlers.py index a867aae47a..04ab46f75b 100644 --- a/django/contrib/auth/tests/handlers.py +++ b/django/contrib/auth/tests/handlers.py @@ -2,30 +2,23 @@ from __future__ import unicode_literals from django.contrib.auth.handlers.modwsgi import check_password, groups_for_user from django.contrib.auth.models import User, Group +from django.contrib.auth.tests import CustomUser from django.contrib.auth.tests.utils import skipIfCustomUser from django.test import TransactionTestCase +from django.test.utils import override_settings class ModWsgiHandlerTestCase(TransactionTestCase): """ Tests for the mod_wsgi authentication handler """ - - def setUp(self): - user1 = User.objects.create_user('test', 'test@example.com', 'test') - User.objects.create_user('test1', 'test1@example.com', 'test1') - group = Group.objects.create(name='test_group') - user1.groups.add(group) - + @skipIfCustomUser def test_check_password(self): """ Verify that check_password returns the correct values as per http://code.google.com/p/modwsgi/wiki/AccessControlMechanisms#Apache_Authentication_Provider - - because the custom user available in the test framework does not - support the is_active attribute, we can't test this with a custom - user. 
""" + User.objects.create_user('test', 'test@example.com', 'test') # User not in database self.assertTrue(check_password({}, 'unknown', '') is None) @@ -33,15 +26,43 @@ class ModWsgiHandlerTestCase(TransactionTestCase): # Valid user with correct password self.assertTrue(check_password({}, 'test', 'test')) + # correct password, but user is inactive + User.objects.filter(username='test').update(is_active=False) + self.assertFalse(check_password({}, 'test', 'test')) + # Valid user with incorrect password self.assertFalse(check_password({}, 'test', 'incorrect')) + @override_settings(AUTH_USER_MODEL='auth.CustomUser') + def test_check_password_custom_user(self): + """ + Verify that check_password returns the correct values as per + http://code.google.com/p/modwsgi/wiki/AccessControlMechanisms#Apache_Authentication_Provider + + with custom user installed + """ + + CustomUser.objects.create_user('test@example.com', '1990-01-01', 'test') + + # User not in database + self.assertTrue(check_password({}, 'unknown', '') is None) + + # Valid user with correct password' + self.assertTrue(check_password({}, 'test@example.com', 'test')) + + # Valid user with incorrect password + self.assertFalse(check_password({}, 'test@example.com', 'incorrect')) + @skipIfCustomUser def test_groups_for_user(self): """ Check that groups_for_user returns correct values as per http://code.google.com/p/modwsgi/wiki/AccessControlMechanisms#Apache_Group_Authorisation """ + user1 = User.objects.create_user('test', 'test@example.com', 'test') + User.objects.create_user('test1', 'test1@example.com', 'test1') + group = Group.objects.create(name='test_group') + user1.groups.add(group) # User not in database self.assertEqual(groups_for_user({}, 'unknown'), []) diff --git a/django/contrib/auth/tests/models.py b/django/contrib/auth/tests/models.py index 252a0887c8..ca65dee71b 100644 --- a/django/contrib/auth/tests/models.py +++ b/django/contrib/auth/tests/models.py @@ -1,4 +1,5 @@ from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.auth.models import (Group, User, SiteProfileNotAvailable, UserManager) from django.contrib.auth.tests.utils import skipIfCustomUser @@ -98,3 +99,36 @@ class UserManagerTestCase(TestCase): self.assertRaisesMessage(ValueError, 'The given username must be set', User.objects.create_user, username='') + + +class IsActiveTestCase(TestCase): + """ + Tests the behavior of the guaranteed is_active attribute + """ + + @skipIfCustomUser + def test_builtin_user_isactive(self): + user = User.objects.create(username='foo', email='foo@bar.com') + # is_active is true by default + self.assertEqual(user.is_active, True) + user.is_active = False + user.save() + user_fetched = User.objects.get(pk=user.pk) + # the is_active flag is saved + self.assertFalse(user_fetched.is_active) + + @override_settings(AUTH_USER_MODEL='auth.IsActiveTestUser1') + def test_is_active_field_default(self): + """ + tests that the default value for is_active is provided + """ + UserModel = get_user_model() + user = UserModel(username='foo') + self.assertEqual(user.is_active, True) + # you can set the attribute - but it will not save + user.is_active = False + # there should be no problem saving - but the attribute is not saved + user.save() + user_fetched = UserModel.objects.get(pk=user.pk) + # the attribute is always true for newly retrieved instance + self.assertEqual(user_fetched.is_active, True) diff --git a/django/contrib/auth/tests/remote_user.py b/django/contrib/auth/tests/remote_user.py index 
9b0f6f8be3..0e59b291a8 100644 --- a/django/contrib/auth/tests/remote_user.py +++ b/django/contrib/auth/tests/remote_user.py @@ -1,8 +1,9 @@ from datetime import datetime from django.conf import settings +from django.contrib.auth import authenticate from django.contrib.auth.backends import RemoteUserBackend -from django.contrib.auth.models import User +from django.contrib.auth.models import User, AnonymousUser from django.contrib.auth.tests.utils import skipIfCustomUser from django.test import TestCase from django.utils import timezone @@ -23,7 +24,7 @@ class RemoteUserTest(TestCase): self.curr_middleware = settings.MIDDLEWARE_CLASSES self.curr_auth = settings.AUTHENTICATION_BACKENDS settings.MIDDLEWARE_CLASSES += (self.middleware,) - settings.AUTHENTICATION_BACKENDS = (self.backend,) + settings.AUTHENTICATION_BACKENDS += (self.backend,) def test_no_remote_user(self): """ @@ -97,6 +98,26 @@ class RemoteUserTest(TestCase): response = self.client.get('/remote_user/', REMOTE_USER=self.known_user) self.assertEqual(default_login, response.context['user'].last_login) + def test_header_disappears(self): + """ + Tests that a logged in user is logged out automatically when + the REMOTE_USER header disappears during the same browser session. + """ + User.objects.create(username='knownuser') + # Known user authenticates + response = self.client.get('/remote_user/', REMOTE_USER=self.known_user) + self.assertEqual(response.context['user'].username, 'knownuser') + # During the session, the REMOTE_USER header disappears. Should trigger logout. + response = self.client.get('/remote_user/') + self.assertEqual(response.context['user'].is_anonymous(), True) + # verify the remoteuser middleware will not remove a user + # authenticated via another backend + User.objects.create_user(username='modeluser', password='foo') + self.client.login(username='modeluser', password='foo') + authenticate(username='modeluser', password='foo') + response = self.client.get('/remote_user/') + self.assertEqual(response.context['user'].username, 'modeluser') + def tearDown(self): """Restores settings to avoid breaking other tests.""" settings.MIDDLEWARE_CLASSES = self.curr_middleware diff --git a/django/contrib/auth/tests/templates/context_processors/auth_attrs_user.html b/django/contrib/auth/tests/templates/context_processors/auth_attrs_user.html index aa7f784405..dc4c6b17c1 100644 --- a/django/contrib/auth/tests/templates/context_processors/auth_attrs_user.html +++ b/django/contrib/auth/tests/templates/context_processors/auth_attrs_user.html @@ -1,4 +1,4 @@ unicode: {{ user }} -id: {{ user.id }} +id: {{ user.pk }} username: {{ user.username }} url: {% url 'userpage' user %} diff --git a/django/contrib/auth/tests/views.py b/django/contrib/auth/tests/views.py index bb17576d31..6040a2f5b5 100644 --- a/django/contrib/auth/tests/views.py +++ b/django/contrib/auth/tests/views.py @@ -11,6 +11,7 @@ from django.http import QueryDict from django.utils.encoding import force_text from django.utils.html import escape from django.utils.http import urlquote +from django.utils._os import upath from django.test import TestCase from django.test.utils import override_settings @@ -27,7 +28,7 @@ from django.contrib.auth.tests.utils import skipIfCustomUser LANGUAGE_CODE='en', TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS, TEMPLATE_DIRS=( - os.path.join(os.path.dirname(__file__), 'templates'), + os.path.join(os.path.dirname(upath(__file__)), 'templates'), ), USE_TZ=False, PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',), @@ 
-115,6 +116,8 @@ class PasswordResetTest(AuthViewsTestCase): self.assertTrue("http://adminsite.com" in mail.outbox[0].body) self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email) + # Skip any 500 handler action (like sending more mail...) + @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True) def test_poisoned_http_host(self): "Poisoned HTTP_HOST headers can't be used for reset emails" # This attack is based on the way browsers handle URLs. The colon @@ -131,6 +134,8 @@ class PasswordResetTest(AuthViewsTestCase): ) self.assertEqual(len(mail.outbox), 0) + # Skip any 500 handler action (like sending more mail...) + @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True) def test_poisoned_http_host_admin_site(self): "Poisoned HTTP_HOST headers can't be used for reset emails on admin views" with self.assertRaises(SuspiciousOperation): @@ -243,7 +248,9 @@ class ChangePasswordTest(AuthViewsTestCase): 'username': 'testclient', 'password': password, }) - self.assertContainsEscaped(response, AuthenticationForm.error_messages['invalid_login']) + self.assertContainsEscaped(response, AuthenticationForm.error_messages['invalid_login'] % { + 'username': User._meta.get_field('username').verbose_name + }) def logout(self): response = self.client.get('/logout/') diff --git a/django/contrib/auth/tokens.py b/django/contrib/auth/tokens.py index 930c70012b..6e5bfe7d9d 100644 --- a/django/contrib/auth/tokens.py +++ b/django/contrib/auth/tokens.py @@ -58,7 +58,7 @@ class PasswordResetTokenGenerator(object): # Ensure results are consistent across DB backends login_timestamp = user.last_login.replace(microsecond=0, tzinfo=None) - value = (six.text_type(user.id) + user.password + + value = (six.text_type(user.pk) + user.password + six.text_type(login_timestamp) + six.text_type(timestamp)) hash = salted_hmac(key_salt, value).hexdigest()[::2] return "%s-%s" % (ts_b36, hash) diff --git a/django/contrib/auth/views.py b/django/contrib/auth/views.py index d27e2f5aba..8514345d00 100644 --- a/django/contrib/auth/views.py +++ b/django/contrib/auth/views.py @@ -7,7 +7,7 @@ from django.conf import settings from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, QueryDict from django.template.response import TemplateResponse -from django.utils.http import base36_to_int +from django.utils.http import base36_to_int, is_safe_url from django.utils.translation import ugettext as _ from django.shortcuts import resolve_url from django.views.decorators.debug import sensitive_post_parameters @@ -37,18 +37,12 @@ def login(request, template_name='registration/login.html', if request.method == "POST": form = authentication_form(data=request.POST) if form.is_valid(): - # Use default setting if redirect_to is empty - if not redirect_to: - redirect_to = settings.LOGIN_REDIRECT_URL - redirect_to = resolve_url(redirect_to) - netloc = urlparse(redirect_to)[1] - # Heavier security check -- don't allow redirection to a different - # host. - if netloc and netloc != request.get_host(): + # Ensure the user-originating redirection url is safe. + if not is_safe_url(url=redirect_to, host=request.get_host()): redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL) - # Okay, security checks complete. Log the user in. + # Okay, security check complete. Log the user in. auth_login(request, form.get_user()) if request.session.test_cookie_worked(): @@ -82,27 +76,27 @@ def logout(request, next_page=None, Logs out the user and displays 'You are logged out' message. 
""" auth_logout(request) - redirect_to = request.REQUEST.get(redirect_field_name, '') - if redirect_to: - netloc = urlparse(redirect_to)[1] - # Security check -- don't allow redirection to a different host. - if not (netloc and netloc != request.get_host()): - return HttpResponseRedirect(redirect_to) - if next_page is None: - current_site = get_current_site(request) - context = { - 'site': current_site, - 'site_name': current_site.name, - 'title': _('Logged out') - } - if extra_context is not None: - context.update(extra_context) - return TemplateResponse(request, template_name, context, - current_app=current_app) - else: + if redirect_field_name in request.REQUEST: + next_page = request.REQUEST[redirect_field_name] + # Security check -- don't allow redirection to a different host. + if not is_safe_url(url=next_page, host=request.get_host()): + next_page = request.path + + if next_page: # Redirect to this page until the session has been cleared. - return HttpResponseRedirect(next_page or request.path) + return HttpResponseRedirect(next_page) + + current_site = get_current_site(request) + context = { + 'site': current_site, + 'site_name': current_site.name, + 'title': _('Logged out') + } + if extra_context is not None: + context.update(extra_context) + return TemplateResponse(request, template_name, context, + current_app=current_app) def logout_then_login(request, login_url=None, current_app=None, extra_context=None): @@ -206,7 +200,7 @@ def password_reset_confirm(request, uidb36=None, token=None, post_reset_redirect = reverse('django.contrib.auth.views.password_reset_complete') try: uid_int = base36_to_int(uidb36) - user = UserModel.objects.get(id=uid_int) + user = UserModel.objects.get(pk=uid_int) except (ValueError, OverflowError, UserModel.DoesNotExist): user = None diff --git a/django/contrib/comments/__init__.py b/django/contrib/comments/__init__.py index 42384e786b..1798c1adb5 100644 --- a/django/contrib/comments/__init__.py +++ b/django/contrib/comments/__init__.py @@ -20,9 +20,9 @@ def get_comment_app(): # Try to import the package try: package = import_module(comments_app) - except ImportError: + except ImportError as e: raise ImproperlyConfigured("The COMMENTS_APP setting refers to "\ - "a non-existing package.") + "a non-existing package. (%s)" % e) return package diff --git a/django/contrib/comments/signals.py b/django/contrib/comments/signals.py index fe1083bd14..079afaf03a 100644 --- a/django/contrib/comments/signals.py +++ b/django/contrib/comments/signals.py @@ -6,7 +6,7 @@ from django.dispatch import Signal # Sent just before a comment will be posted (after it's been approved and # moderated; this can be used to modify the comment (in place) with posting # details or other such actions. If any receiver returns False the comment will be -# discarded and a 403 (not allowed) response. This signal is sent at more or less +# discarded and a 400 response. This signal is sent at more or less # the same time (just before, actually) as the Comment object's pre-save signal, # except that the HTTP request is sent along with this signal. 
comment_will_be_posted = Signal(providing_args=["comment", "request"]) diff --git a/django/contrib/comments/views/comments.py b/django/contrib/comments/views/comments.py index 27d5a48ac6..7c02b21b6a 100644 --- a/django/contrib/comments/views/comments.py +++ b/django/contrib/comments/views/comments.py @@ -44,9 +44,6 @@ def post_comment(request, next=None, using=None): if not data.get('email', ''): data["email"] = request.user.email - # Check to see if the POST data overrides the view's next argument. - next = data.get("next", next) - # Look up the object we're trying to comment about ctype = data.get("content_type") object_pk = data.get("object_pk") @@ -100,7 +97,7 @@ def post_comment(request, next=None, using=None): template_list, { "comment": form.data.get("comment", ""), "form": form, - "next": next, + "next": data.get("next", next), }, RequestContext(request, {}) ) @@ -131,7 +128,8 @@ def post_comment(request, next=None, using=None): request=request ) - return next_redirect(data, next, comment_done, c=comment._get_pk_val()) + return next_redirect(request, fallback=next or 'comments-comment-done', + c=comment._get_pk_val()) comment_done = confirmation_view( template="comments/posted.html", diff --git a/django/contrib/comments/views/moderation.py b/django/contrib/comments/views/moderation.py index 39933e75c8..31bb98fa63 100644 --- a/django/contrib/comments/views/moderation.py +++ b/django/contrib/comments/views/moderation.py @@ -10,7 +10,6 @@ from django.shortcuts import get_object_or_404, render_to_response from django.views.decorators.csrf import csrf_protect - @csrf_protect @login_required def flag(request, comment_id, next=None): @@ -27,7 +26,8 @@ def flag(request, comment_id, next=None): # Flag on POST if request.method == 'POST': perform_flag(request, comment) - return next_redirect(request.POST.copy(), next, flag_done, c=comment.pk) + return next_redirect(request, fallback=next or 'comments-flag-done', + c=comment.pk) # Render a form on GET else: @@ -54,7 +54,8 @@ def delete(request, comment_id, next=None): if request.method == 'POST': # Flag the comment as deleted instead of actually deleting it. perform_delete(request, comment) - return next_redirect(request.POST.copy(), next, delete_done, c=comment.pk) + return next_redirect(request, fallback=next or 'comments-delete-done', + c=comment.pk) # Render a form on GET else: @@ -81,7 +82,8 @@ def approve(request, comment_id, next=None): if request.method == 'POST': # Flag the comment as approved. perform_approve(request, comment) - return next_redirect(request.POST.copy(), next, approve_done, c=comment.pk) + return next_redirect(request, fallback=next or 'comments-approve-done', + c=comment.pk) # Render a form on GET else: diff --git a/django/contrib/comments/views/utils.py b/django/contrib/comments/views/utils.py index abaed68560..79f6376232 100644 --- a/django/contrib/comments/views/utils.py +++ b/django/contrib/comments/views/utils.py @@ -9,25 +9,26 @@ except ImportError: # Python 2 from urllib import urlencode from django.http import HttpResponseRedirect -from django.core import urlresolvers -from django.shortcuts import render_to_response +from django.shortcuts import render_to_response, resolve_url from django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist from django.contrib import comments +from django.utils.http import is_safe_url -def next_redirect(data, default, default_view, **get_kwargs): +def next_redirect(request, fallback, **get_kwargs): """ Handle the "where should I go next?" 
part of comment views. - The next value could be a kwarg to the function (``default``), or a - ``?next=...`` GET arg, or the URL of a given view (``default_view``). See + The next value could be a + ``?next=...`` GET arg or the URL of a given view (``fallback``). See the view modules for examples. Returns an ``HttpResponseRedirect``. """ - next = data.get("next", default) - if next is None: - next = urlresolvers.reverse(default_view) + next = request.POST.get('next') + if not is_safe_url(url=next, host=request.get_host()): + next = resolve_url(fallback) + if get_kwargs: if '#' in next: tmp = next.rsplit('#', 1) diff --git a/django/contrib/contenttypes/generic.py b/django/contrib/contenttypes/generic.py index 29e93eefe7..6aff07e568 100644 --- a/django/contrib/contenttypes/generic.py +++ b/django/contrib/contenttypes/generic.py @@ -5,20 +5,19 @@ from __future__ import unicode_literals from collections import defaultdict from functools import partial -from operator import attrgetter from django.core.exceptions import ObjectDoesNotExist from django.db import connection from django.db.models import signals from django.db import models, router, DEFAULT_DB_ALIAS from django.db.models.fields.related import RelatedField, Field, ManyToManyRel -from django.db.models.loading import get_model from django.forms import ModelForm from django.forms.models import BaseModelFormSet, modelformset_factory, save_instance from django.contrib.admin.options import InlineModelAdmin, flatten_fieldsets from django.contrib.contenttypes.models import ContentType from django.utils.encoding import smart_text + class GenericForeignKey(object): """ Provides a generic relation to any object through content-type/object-id @@ -52,9 +51,6 @@ class GenericForeignKey(object): kwargs[self.fk_field] = value._get_pk_val() def get_content_type(self, obj=None, id=None, using=None): - # Convenience function using get_model avoids a circular import when - # using this model - ContentType = get_model("contenttypes", "contenttype") if obj: return ContentType.objects.db_manager(obj._state.db).get_for_model(obj) elif id: @@ -209,18 +205,16 @@ class GenericRelation(RelatedField, Field): # same db_type as well. return None - def extra_filters(self, pieces, pos, negate): + def get_content_type(self): """ - Return an extra filter to the queryset so that the results are filtered - on the appropriate content type. + Returns the content type associated with this field's model. """ - if negate: - return [] - ContentType = get_model("contenttypes", "contenttype") - content_type = ContentType.objects.get_for_model(self.model) - prefix = "__".join(pieces[:pos + 1]) - return [("%s__%s" % (prefix, self.content_type_field_name), - content_type)] + return ContentType.objects.get_for_model(self.model) + + def get_extra_join_sql(self, connection, qn, lhs_alias, rhs_alias): + extra_col = self.rel.to._meta.get_field_by_name(self.content_type_field_name)[0].column + contenttype = self.get_content_type().pk + return " AND %s.%s = %%s" % (qn(rhs_alias), qn(extra_col)), [contenttype] def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS): """ @@ -251,9 +245,6 @@ class ReverseGenericRelatedObjectsDescriptor(object): if instance is None: return self - # This import is done here to avoid circular import importing this module - from django.contrib.contenttypes.models import ContentType - # Dynamically create a class that subclasses the related model's # default manager. 
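
next_redirect() now takes the request plus a fallback (a URL or a view name) and only honours a POSTed next value that passes is_safe_url(). A sketch of how a comment view calls it after this change; the view body is illustrative, while 'comments-flag-done' is one of the fallbacks used above:

    from django.contrib.comments.views.utils import next_redirect

    def close_flag(request, comment_id):
        # Hypothetical moderation view: defer the "where next?" decision to
        # next_redirect(), which resolves the fallback unless the POSTed
        # next value is a safe, same-host URL.
        return next_redirect(request, fallback='comments-flag-done', c=comment_id)
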
rel_model = self.field.rel.to @@ -329,8 +320,11 @@ def create_generic_related_manager(superclass): set(obj._get_pk_val() for obj in instances) } qs = super(GenericRelatedObjectManager, self).get_query_set().using(db).filter(**query) + # We (possibly) need to convert object IDs to the type of the + # instances' PK in order to match up instances: + object_id_converter = instances[0]._meta.pk.to_python return (qs, - attrgetter(self.object_id_field_name), + lambda relobj: object_id_converter(getattr(relobj, self.object_id_field_name)), lambda obj: obj._get_pk_val(), False, self.prefetch_cache_name) @@ -381,8 +375,6 @@ class BaseGenericInlineFormSet(BaseModelFormSet): def __init__(self, data=None, files=None, instance=None, save_as_new=None, prefix=None, queryset=None): - # Avoid a circular import. - from django.contrib.contenttypes.models import ContentType opts = self.model._meta self.instance = instance self.rel_name = '-'.join(( @@ -411,8 +403,6 @@ class BaseGenericInlineFormSet(BaseModelFormSet): )) def save_new(self, form, commit=True): - # Avoid a circular import. - from django.contrib.contenttypes.models import ContentType kwargs = { self.ct_field.get_attname(): ContentType.objects.get_for_model(self.instance).pk, self.ct_fk_field.get_attname(): self.instance.pk, @@ -434,8 +424,6 @@ def generic_inlineformset_factory(model, form=ModelForm, defaults ``content_type`` and ``object_id`` respectively. """ opts = model._meta - # Avoid a circular import. - from django.contrib.contenttypes.models import ContentType # if there is no field called `ct_field` let the exception propagate ct_field = opts.get_field(ct_field) if not isinstance(ct_field, models.ForeignKey) or ct_field.rel.to != ContentType: diff --git a/django/contrib/contenttypes/management.py b/django/contrib/contenttypes/management.py index 9f287d494b..8329ab65d9 100644 --- a/django/contrib/contenttypes/management.py +++ b/django/contrib/contenttypes/management.py @@ -1,14 +1,19 @@ from django.contrib.contenttypes.models import ContentType +from django.db import DEFAULT_DB_ALIAS, router from django.db.models import get_apps, get_models, signals from django.utils.encoding import smart_text from django.utils import six from django.utils.six.moves import input -def update_contenttypes(app, created_models, verbosity=2, **kwargs): + +def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, **kwargs): """ Creates content types for models in the given app, removing any model entries that no longer have a matching model class. 
""" + if not router.allow_syncdb(db, ContentType): + return + ContentType.objects.clear_cache() app_models = get_models(app) if not app_models: @@ -19,10 +24,11 @@ def update_contenttypes(app, created_models, verbosity=2, **kwargs): (model._meta.object_name.lower(), model) for model in app_models ) + # Get all the content types content_types = dict( (ct.model, ct) - for ct in ContentType.objects.filter(app_label=app_label) + for ct in ContentType.objects.using(db).filter(app_label=app_label) ) to_remove = [ ct @@ -30,7 +36,7 @@ def update_contenttypes(app, created_models, verbosity=2, **kwargs): if model_name not in app_models ] - cts = ContentType.objects.bulk_create([ + cts = [ ContentType( name=smart_text(model._meta.verbose_name_raw), app_label=app_label, @@ -38,7 +44,8 @@ def update_contenttypes(app, created_models, verbosity=2, **kwargs): ) for (model_name, model) in six.iteritems(app_models) if model_name not in content_types - ]) + ] + ContentType.objects.using(db).bulk_create(cts) if verbosity >= 2: for ct in cts: print("Adding content type '%s | %s'" % (ct.app_label, ct.model)) @@ -71,6 +78,7 @@ If you're unsure, answer 'no'. if verbosity >= 2: print("Stale content types remain.") + def update_all_contenttypes(verbosity=2, **kwargs): for app in get_apps(): update_contenttypes(app, None, verbosity, **kwargs) diff --git a/django/contrib/flatpages/forms.py b/django/contrib/flatpages/forms.py index e0a63e4323..a848875a9f 100644 --- a/django/contrib/flatpages/forms.py +++ b/django/contrib/flatpages/forms.py @@ -35,7 +35,7 @@ class FlatpageForm(forms.ModelForm): for site in sites: if same_url.filter(sites=site).exists(): raise forms.ValidationError( - _('Flatpage with url %(url)s already exists for site %(site)s' % - {'url': url, 'site': site})) + _('Flatpage with url %(url)s already exists for site %(site)s') % + {'url': url, 'site': site}) return super(FlatpageForm, self).clean() diff --git a/django/contrib/formtools/tests/__init__.py b/django/contrib/formtools/tests/__init__.py index a21ffde533..aa7d5ff7d4 100644 --- a/django/contrib/formtools/tests/__init__.py +++ b/django/contrib/formtools/tests/__init__.py @@ -14,6 +14,7 @@ from django.contrib.formtools.wizard import FormWizard from django.test import TestCase from django.test.html import parse_html from django.test.utils import override_settings +from django.utils._os import upath from django.utils import unittest from django.contrib.formtools.tests.wizard import * @@ -36,7 +37,7 @@ class TestFormPreview(preview.FormPreview): @override_settings( TEMPLATE_DIRS=( - os.path.join(os.path.dirname(__file__), 'templates'), + os.path.join(os.path.dirname(upath(__file__)), 'templates'), ), ) class PreviewTests(TestCase): @@ -214,7 +215,7 @@ class DummyRequest(http.HttpRequest): @override_settings( SECRET_KEY="123", TEMPLATE_DIRS=( - os.path.join(os.path.dirname(__file__), 'templates'), + os.path.join(os.path.dirname(upath(__file__)), 'templates'), ), ) class WizardTests(TestCase): diff --git a/django/contrib/formtools/tests/wizard/wizardtests/tests.py b/django/contrib/formtools/tests/wizard/wizardtests/tests.py index 586bd59341..4aaea7d56e 100644 --- a/django/contrib/formtools/tests/wizard/wizardtests/tests.py +++ b/django/contrib/formtools/tests/wizard/wizardtests/tests.py @@ -9,6 +9,7 @@ from django.conf import settings from django.contrib.auth.models import User from django.contrib.formtools.wizard.views import CookieWizardView from django.contrib.formtools.tests.wizard.forms import UserForm, UserFormSet +from django.utils._os import 
upath class WizardTests(object): @@ -72,6 +73,10 @@ class WizardTests(object): self.assertEqual(response.context['wizard']['steps'].current, 'form2') self.assertEqual(response.context.get('another_var', None), True) + # ticket #19025: `form` should be included in context + form = response.context_data['wizard']['form'] + self.assertEqual(response.context_data['form'], form) + def test_form_finish(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) @@ -82,7 +87,7 @@ class WizardTests(object): self.assertEqual(response.context['wizard']['steps'].current, 'form2') post_data = self.wizard_step_data[1] - post_data['form2-file1'] = open(__file__, 'rb') + post_data['form2-file1'] = open(upath(__file__), 'rb') response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form3') @@ -95,7 +100,7 @@ class WizardTests(object): self.assertEqual(response.status_code, 200) all_data = response.context['form_list'] - with open(__file__, 'rb') as f: + with open(upath(__file__), 'rb') as f: self.assertEqual(all_data[1]['file1'].read(), f.read()) all_data[1]['file1'].close() del all_data[1]['file1'] @@ -114,7 +119,7 @@ class WizardTests(object): self.assertEqual(response.status_code, 200) post_data = self.wizard_step_data[1] - with open(__file__, 'rb') as post_file: + with open(upath(__file__), 'rb') as post_file: post_data['form2-file1'] = post_file response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) @@ -126,7 +131,7 @@ class WizardTests(object): self.assertEqual(response.status_code, 200) all_data = response.context['all_cleaned_data'] - with open(__file__, 'rb') as f: + with open(upath(__file__), 'rb') as f: self.assertEqual(all_data['file1'].read(), f.read()) all_data['file1'].close() del all_data['file1'] @@ -146,7 +151,7 @@ class WizardTests(object): post_data = self.wizard_step_data[1] post_data['form2-file1'].close() - post_data['form2-file1'] = open(__file__, 'rb') + post_data['form2-file1'] = open(upath(__file__), 'rb') response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) @@ -174,7 +179,7 @@ class WizardTests(object): post_data = self.wizard_step_data[1] post_data['form2-file1'].close() - post_data['form2-file1'] = open(__file__, 'rb') + post_data['form2-file1'] = open(upath(__file__), 'rb') response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form3') @@ -287,7 +292,7 @@ class WizardTestKwargs(TestCase): self.wizard_step_data[0]['form1-user'] = self.testuser.pk def test_template(self): - templates = os.path.join(os.path.dirname(__file__), 'templates') + templates = os.path.join(os.path.dirname(upath(__file__)), 'templates') with self.settings( TEMPLATE_DIRS=list(settings.TEMPLATE_DIRS) + [templates]): response = self.client.get(self.wizard_url) diff --git a/django/contrib/formtools/wizard/storage/base.py b/django/contrib/formtools/wizard/storage/base.py index aafc833484..2e59679d09 100644 --- a/django/contrib/formtools/wizard/storage/base.py +++ b/django/contrib/formtools/wizard/storage/base.py @@ -69,7 +69,9 @@ class BaseStorage(object): wizard_files = self.data[self.step_files_key].get(step, {}) if wizard_files and not self.file_storage: - raise NoFileStorageConfigured + raise NoFileStorageConfigured( + "You need to define 'file_storage' in your " + 
"wizard view in order to handle file uploads.") files = {} for field, field_dict in six.iteritems(wizard_files): @@ -81,7 +83,9 @@ class BaseStorage(object): def set_step_files(self, step, files): if files and not self.file_storage: - raise NoFileStorageConfigured + raise NoFileStorageConfigured( + "You need to define 'file_storage' in your " + "wizard view in order to handle file uploads.") if step not in self.data[self.step_files_key]: self.data[self.step_files_key][step] = {} diff --git a/django/contrib/formtools/wizard/views.py b/django/contrib/formtools/wizard/views.py index ea41e86852..cba39151e7 100644 --- a/django/contrib/formtools/wizard/views.py +++ b/django/contrib/formtools/wizard/views.py @@ -174,7 +174,9 @@ class WizardView(TemplateView): for field in six.itervalues(form.base_fields): if (isinstance(field, forms.FileField) and not hasattr(cls, 'file_storage')): - raise NoFileStorageConfigured + raise NoFileStorageConfigured( + "You need to define 'file_storage' in your " + "wizard view in order to handle file uploads.") # build the kwargs for the wizardview instances kwargs['form_list'] = init_form_list @@ -436,8 +438,8 @@ class WizardView(TemplateView): def get_all_cleaned_data(self): """ Returns a merged dictionary of all step cleaned_data dictionaries. - If a step contains a `FormSet`, the key will be prefixed with formset - and contain a list of the formset cleaned_data dictionaries. + If a step contains a `FormSet`, the key will be prefixed with + 'formset-' and contain a list of the formset cleaned_data dictionaries. """ cleaned_data = {} for form_key in self.get_form_list(): @@ -458,8 +460,8 @@ class WizardView(TemplateView): def get_cleaned_data_for_step(self, step): """ Returns the cleaned data for a given `step`. Before returning the - cleaned data, the stored values are being revalidated through the - form. If the data doesn't validate, None will be returned. + cleaned data, the stored values are revalidated through the form. + If the data doesn't validate, None will be returned. """ if step in self.form_list: form_obj = self.get_form(step=step, @@ -528,7 +530,7 @@ class WizardView(TemplateView): context.update({'another_var': True}) return context """ - context = super(WizardView, self).get_context_data(**kwargs) + context = super(WizardView, self).get_context_data(form=form, **kwargs) context.update(self.storage.extra_data) context['wizard'] = { 'form': form, diff --git a/django/contrib/gis/db/backends/oracle/compiler.py b/django/contrib/gis/db/backends/oracle/compiler.py index f0eb5cad00..98da0163ba 100644 --- a/django/contrib/gis/db/backends/oracle/compiler.py +++ b/django/contrib/gis/db/backends/oracle/compiler.py @@ -7,29 +7,7 @@ class GeoSQLCompiler(BaseGeoSQLCompiler, SQLCompiler): pass class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler): - def placeholder(self, field, val): - if field is None: - # A field value of None means the value is raw. - return val - elif hasattr(field, 'get_placeholder'): - # Some fields (e.g. geo fields) need special munging before - # they can be inserted. - ph = field.get_placeholder(val, self.connection) - if ph == 'NULL': - # If the placeholder returned is 'NULL', then we need to - # to remove None from the Query parameters. Specifically, - # cx_Oracle will assume a CHAR type when a placeholder ('%s') - # is used for columns of MDSYS.SDO_GEOMETRY. Thus, we use - # 'NULL' for the value, and remove None from the query params. - # See also #10888. 
- param_idx = self.query.columns.index(field.column) - params = list(self.query.params) - params.pop(param_idx) - self.query.params = tuple(params) - return ph - else: - # Return the common case for the placeholder - return '%s' + pass class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler): pass diff --git a/django/contrib/gis/db/backends/oracle/operations.py b/django/contrib/gis/db/backends/oracle/operations.py index 35a4d9491d..4e42b4cf00 100644 --- a/django/contrib/gis/db/backends/oracle/operations.py +++ b/django/contrib/gis/db/backends/oracle/operations.py @@ -288,3 +288,12 @@ class OracleOperations(DatabaseOperations, BaseSpatialOperations): def spatial_ref_sys(self): from django.contrib.gis.db.backends.oracle.models import SpatialRefSys return SpatialRefSys + + def modify_insert_params(self, placeholders, params): + """Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial + backend due to #10888 + """ + # This code doesn't work for bulk insert cases. + assert len(placeholders) == 1 + return [[param for pholder,param + in six.moves.zip(placeholders[0], params[0]) if pholder != 'NULL'], ] diff --git a/django/contrib/gis/db/backends/postgis/creation.py b/django/contrib/gis/db/backends/postgis/creation.py index 406dc4e487..43ae9a0331 100644 --- a/django/contrib/gis/db/backends/postgis/creation.py +++ b/django/contrib/gis/db/backends/postgis/creation.py @@ -1,12 +1,23 @@ from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation +from django.utils.functional import cached_property + class PostGISCreation(DatabaseCreation): geom_index_type = 'GIST' geom_index_ops = 'GIST_GEOMETRY_OPS' geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND' + @cached_property + def template_postgis(self): + template_postgis = getattr(settings, 'POSTGIS_TEMPLATE', 'template_postgis') + cursor = self.connection.cursor() + cursor.execute('SELECT 1 FROM pg_database WHERE datname = %s LIMIT 1;', (template_postgis,)) + if cursor.fetchone(): + return template_postgis + return None + def sql_indexes_for_field(self, model, f, style): "Return any spatial index creation SQL for the field." 
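A standalone illustration (not taken from the patch) of the filtering the new modify_insert_params() performs for a single row: any parameter aligned with a literal 'NULL' placeholder is dropped, so cx_Oracle never receives a bind value for a geometry column being set to NULL:

placeholders = [['%s', 'NULL', '%s']]
params = [['Interstate 10', None, 4326]]
filtered = [[param for ph, param in zip(placeholders[0], params[0])
             if ph != 'NULL']]
assert filtered == [['Interstate 10', 4326]]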
from django.contrib.gis.db.models.fields import GeometryField @@ -67,5 +78,19 @@ class PostGISCreation(DatabaseCreation): return output def sql_table_creation_suffix(self): - postgis_template = getattr(settings, 'POSTGIS_TEMPLATE', 'template_postgis') - return ' TEMPLATE %s' % self.connection.ops.quote_name(postgis_template) + if self.template_postgis is not None: + return ' TEMPLATE %s' % ( + self.connection.ops.quote_name(self.template_postgis),) + return '' + + def _create_test_db(self, verbosity, autoclobber): + test_database_name = super(PostGISCreation, self)._create_test_db(verbosity, autoclobber) + if self.template_postgis is None: + # Connect to the test database in order to create the postgis extension + self.connection.close() + self.connection.settings_dict["NAME"] = test_database_name + cursor = self.connection.cursor() + cursor.execute("CREATE EXTENSION postgis") + cursor.connection.commit() + + return test_database_name diff --git a/django/contrib/gis/db/backends/spatialite/base.py b/django/contrib/gis/db/backends/spatialite/base.py index b447d1d9ff..b3a53820c1 100644 --- a/django/contrib/gis/db/backends/spatialite/base.py +++ b/django/contrib/gis/db/backends/spatialite/base.py @@ -36,29 +36,23 @@ class DatabaseWrapper(SQLiteDatabaseWrapper): self.creation = SpatiaLiteCreation(self) self.introspection = SpatiaLiteIntrospection(self) - def _cursor(self): - if self.connection is None: - self._sqlite_create_connection() - - ## From here on, customized for GeoDjango ## - - # Enabling extension loading on the SQLite connection. - try: - self.connection.enable_load_extension(True) - except AttributeError: - raise ImproperlyConfigured('The pysqlite library does not support C extension loading. ' - 'Both SQLite and pysqlite must be configured to allow ' - 'the loading of extensions to use SpatiaLite.' - ) - - # Loading the SpatiaLite library extension on the connection, and returning - # the created cursor. - cur = self.connection.cursor(factory=SQLiteCursorWrapper) - try: - cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,)) - except Exception as msg: - raise ImproperlyConfigured('Unable to load the SpatiaLite library extension ' - '"%s" because: %s' % (self.spatialite_lib, msg)) - return cur - else: - return self.connection.cursor(factory=SQLiteCursorWrapper) + def get_new_connection(self, conn_params): + conn = super(DatabaseWrapper, self).get_new_connection(conn_params) + # Enabling extension loading on the SQLite connection. + try: + conn.enable_load_extension(True) + except AttributeError: + raise ImproperlyConfigured( + 'The pysqlite library does not support C extension loading. ' + 'Both SQLite and pysqlite must be configured to allow ' + 'the loading of extensions to use SpatiaLite.') + # Loading the SpatiaLite library extension on the connection, and returning + # the created cursor. 
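For the PostGIS creation changes above, a settings.py sketch of the optional setting the new template_postgis property consults; the value shown is the default the patch falls back to. When no such template database exists, the test database is created without a TEMPLATE clause and CREATE EXTENSION postgis is run on it instead:

# Optional; only needed when a spatial template database should be used.
POSTGIS_TEMPLATE = 'template_postgis'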
+ cur = conn.cursor(factory=SQLiteCursorWrapper) + try: + cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,)) + except Exception as msg: + raise ImproperlyConfigured('Unable to load the SpatiaLite library extension ' + '"%s" because: %s' % (self.spatialite_lib, msg)) + cur.close() + return conn diff --git a/django/contrib/gis/db/models/query.py b/django/contrib/gis/db/models/query.py index 2ffbd2021b..c89912b2d9 100644 --- a/django/contrib/gis/db/models/query.py +++ b/django/contrib/gis/db/models/query.py @@ -760,8 +760,10 @@ class GeoQuerySet(QuerySet): self.query.add_select_related([field_name]) compiler = self.query.get_compiler(self.db) compiler.pre_sql_setup() - rel_table, rel_col = self.query.related_select_cols[self.query.related_select_fields.index(geo_field)] - return compiler._field_column(geo_field, rel_table) + for (rel_table, rel_col), field in self.query.related_select_cols: + if field == geo_field: + return compiler._field_column(geo_field, rel_table) + raise ValueError("%r not in self.query.related_select_cols" % geo_field) elif not geo_field in opts.local_fields: # This geographic field is inherited from another model, so we have to # use the db table for the _parent_ model instead. diff --git a/django/contrib/gis/db/models/sql/compiler.py b/django/contrib/gis/db/models/sql/compiler.py index cf6a8ad047..81a9941c9e 100644 --- a/django/contrib/gis/db/models/sql/compiler.py +++ b/django/contrib/gis/db/models/sql/compiler.py @@ -39,7 +39,7 @@ class GeoSQLCompiler(compiler.SQLCompiler): if self.query.select: only_load = self.deferred_to_columns() # This loop customized for GeoQuery. - for col, field in zip(self.query.select, self.query.select_fields): + for col, field in self.query.select: if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias].table_name @@ -85,7 +85,7 @@ class GeoSQLCompiler(compiler.SQLCompiler): ]) # This loop customized for GeoQuery. - for (table, col), field in zip(self.query.related_select_cols, self.query.related_select_fields): + for (table, col), field in self.query.related_select_cols: r = self.get_field_select(field, table, col) if with_aliases and col in col_aliases: c_alias = 'Col%d' % len(col_aliases) @@ -101,7 +101,7 @@ class GeoSQLCompiler(compiler.SQLCompiler): return result def get_default_columns(self, with_aliases=False, col_aliases=None, - start_alias=None, opts=None, as_pairs=False, local_only=False): + start_alias=None, opts=None, as_pairs=False, from_parent=None): """ Computes the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. 
via @@ -127,7 +127,7 @@ class GeoSQLCompiler(compiler.SQLCompiler): if start_alias: seen = {None: start_alias} for field, model in opts.get_fields_with_model(): - if local_only and model is not None: + if from_parent and model is not None and issubclass(from_parent, model): continue if start_alias: try: diff --git a/django/contrib/gis/geoip/tests.py b/django/contrib/gis/geoip/tests.py index e53230d9ad..c890c4f4ba 100644 --- a/django/contrib/gis/geoip/tests.py +++ b/django/contrib/gis/geoip/tests.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import unicode_literals import os diff --git a/django/contrib/gis/geometry/test_data.py b/django/contrib/gis/geometry/test_data.py index b0f6e1ad57..e13e8589e6 100644 --- a/django/contrib/gis/geometry/test_data.py +++ b/django/contrib/gis/geometry/test_data.py @@ -7,13 +7,14 @@ import os from django.contrib import gis from django.utils import six +from django.utils._os import upath # This global used to store reference geometry data. GEOMETRIES = None # Path where reference test data is located. -TEST_DATA = os.path.join(os.path.dirname(gis.__file__), 'tests', 'data') +TEST_DATA = os.path.join(os.path.dirname(upath(gis.__file__)), 'tests', 'data') def tuplize(seq): diff --git a/django/contrib/gis/geos/mutable_list.py b/django/contrib/gis/geos/mutable_list.py index 820cdfa5a4..0418282bfe 100644 --- a/django/contrib/gis/geos/mutable_list.py +++ b/django/contrib/gis/geos/mutable_list.py @@ -149,27 +149,30 @@ class ListMixin(object): return self def __eq__(self, other): - for i in range(len(self)): + olen = len(other) + for i in range(olen): try: c = self[i] == other[i] - except IndexError: - # must be other is shorter + except self._IndexError: + # self must be shorter return False if not c: return False - return True + return len(self) == olen def __lt__(self, other): - slen = len(self) - for i in range(slen): + olen = len(other) + for i in range(olen): try: c = self[i] < other[i] - except IndexError: - # must be other is shorter - return False + except self._IndexError: + # self must be shorter + return True if c: return c - return slen < len(other) + elif other[i] < self[i]: + return False + return len(self) < olen ### Public list interface Methods ### ## Non-mutating ## diff --git a/django/contrib/gis/geos/tests/test_geos.py b/django/contrib/gis/geos/tests/test_geos.py index 283daa47c0..ec320f94ec 100644 --- a/django/contrib/gis/geos/tests/test_geos.py +++ b/django/contrib/gis/geos/tests/test_geos.py @@ -451,6 +451,21 @@ class GEOSTest(unittest.TestCase, TestDataMixin): self.assertEqual(poly.wkt, Polygon(*tuple(r for r in poly)).wkt) self.assertEqual(poly.wkt, Polygon(*tuple(LinearRing(r.tuple) for r in poly)).wkt) + def test_polygon_comparison(self): + p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) + p2 = Polygon(((0, 0), (0, 1), (1, 0), (0, 0))) + self.assertTrue(p1 > p2) + self.assertFalse(p1 < p2) + self.assertFalse(p2 > p1) + self.assertTrue(p2 < p1) + + p3 = Polygon(((0, 0), (0, 1), (1, 1), (2, 0), (0, 0))) + p4 = Polygon(((0, 0), (0, 1), (2, 2), (1, 0), (0, 0))) + self.assertFalse(p4 < p3) + self.assertTrue(p3 < p4) + self.assertTrue(p4 > p3) + self.assertFalse(p3 > p4) + def test_multipolygons(self): "Testing MultiPolygon objects." 
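The comparison semantics the patched ListMixin now matches are those of plain Python sequences, which the new polygon tests above exercise through LinearRing coordinate lists; shown here with ordinary lists:

assert [0, 1] < [0, 1, 2]        # a strict prefix compares as smaller
assert not [0, 2] < [0, 1, 2]    # otherwise the first differing item decides
assert [0, 1] != [0, 1, 2]       # sequences of unequal length are never equal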
prev = fromstr('POINT (0 0)') diff --git a/django/contrib/gis/geos/tests/test_mutable_list.py b/django/contrib/gis/geos/tests/test_mutable_list.py index 675505f0f9..988d8417a2 100644 --- a/django/contrib/gis/geos/tests/test_mutable_list.py +++ b/django/contrib/gis/geos/tests/test_mutable_list.py @@ -363,6 +363,7 @@ class ListMixinTest(unittest.TestCase): pl, ul = self.lists_of_len() self.assertEqual(pl, ul, 'cmp for equal') + self.assertFalse(ul == pl + [2], 'cmp for not equal') self.assertTrue(pl >= ul, 'cmp for gte self') self.assertTrue(pl <= ul, 'cmp for lte self') self.assertTrue(ul >= pl, 'cmp for self gte') @@ -377,6 +378,14 @@ class ListMixinTest(unittest.TestCase): self.assertTrue(ul < pl + [2], 'cmp') self.assertTrue(ul <= pl + [2], 'cmp') + # Also works with a custom IndexError + ul_longer = ul + [2] + ul_longer._IndexError = TypeError + ul._IndexError = TypeError + self.assertFalse(ul_longer == pl) + self.assertFalse(ul == ul_longer) + self.assertTrue(ul_longer > ul) + pl[1] = 20 self.assertTrue(pl > ul, 'cmp for gt self') self.assertTrue(ul < pl, 'cmp for self lt') diff --git a/django/contrib/gis/measure.py b/django/contrib/gis/measure.py index 6e074be355..e2e6b6bca8 100644 --- a/django/contrib/gis/measure.py +++ b/django/contrib/gis/measure.py @@ -151,7 +151,9 @@ class MeasureBase(object): **{self.STANDARD_UNIT: (self.standard / other)}) else: raise TypeError('%(class)s must be divided with number or %(class)s' % {"class":pretty_name(self)}) - __div__ = __truediv__ # Python 2 compatibility + + def __div__(self, other): # Python 2 compatibility + return type(self).__truediv__(self, other) def __itruediv__(self, other): if isinstance(other, NUMERIC_TYPES): @@ -159,11 +161,15 @@ class MeasureBase(object): return self else: raise TypeError('%(class)s must be divided with number' % {"class":pretty_name(self)}) - __idiv__ = __itruediv__ # Python 2 compatibility + + def __idiv__(self, other): # Python 2 compatibility + return type(self).__itruediv__(self, other) def __bool__(self): return bool(self.standard) - __nonzero__ = __bool__ # Python 2 compatibility + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) def default_units(self, kwargs): """ @@ -314,7 +320,9 @@ class Area(MeasureBase): **{self.STANDARD_UNIT: (self.standard / other)}) else: raise TypeError('%(class)s must be divided by a number' % {"class":pretty_name(self)}) - __div__ = __truediv__ # Python 2 compatibility + + def __div__(self, other): # Python 2 compatibility + return type(self).__truediv__(self, other) # Shortcuts diff --git a/django/contrib/gis/tests/geo3d/tests.py b/django/contrib/gis/tests/geo3d/tests.py index f7590fe84a..6b40164422 100644 --- a/django/contrib/gis/tests/geo3d/tests.py +++ b/django/contrib/gis/tests/geo3d/tests.py @@ -7,12 +7,13 @@ from django.contrib.gis.db.models import Union, Extent3D from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon from django.contrib.gis.utils import LayerMapping, LayerMapError from django.test import TestCase +from django.utils._os import upath from .models import (City3D, Interstate2D, Interstate3D, InterstateProj2D, InterstateProj3D, Point2D, Point3D, MultiPoint3D, Polygon2D, Polygon3D) -data_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data')) +data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data')) city_file = os.path.join(data_path, 'cities', 'cities.shp') vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt') diff --git 
a/django/contrib/gis/tests/geogapp/tests.py b/django/contrib/gis/tests/geogapp/tests.py index 2fd3560d0a..a8c607c502 100644 --- a/django/contrib/gis/tests/geogapp/tests.py +++ b/django/contrib/gis/tests/geogapp/tests.py @@ -8,6 +8,7 @@ import os from django.contrib.gis import gdal from django.contrib.gis.measure import D from django.test import TestCase +from django.utils._os import upath from .models import City, County, Zipcode @@ -61,7 +62,7 @@ class GeographyTest(TestCase): from django.contrib.gis.utils import LayerMapping # Getting the shapefile and mapping dictionary. - shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data')) + shp_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data')) co_shp = os.path.join(shp_path, 'counties', 'counties.shp') co_mapping = {'name' : 'Name', 'state' : 'State', diff --git a/django/contrib/gis/tests/layermap/tests.py b/django/contrib/gis/tests/layermap/tests.py index a976954d25..470e5be216 100644 --- a/django/contrib/gis/tests/layermap/tests.py +++ b/django/contrib/gis/tests/layermap/tests.py @@ -13,13 +13,14 @@ from django.db import router from django.conf import settings from django.test import TestCase from django.utils import unittest +from django.utils._os import upath from .models import ( City, County, CountyFeat, Interstate, ICity1, ICity2, Invalid, State, city_mapping, co_mapping, cofeat_mapping, inter_mapping) -shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), os.pardir, 'data')) +shp_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), os.pardir, 'data')) city_shp = os.path.join(shp_path, 'cities', 'cities.shp') co_shp = os.path.join(shp_path, 'counties', 'counties.shp') inter_shp = os.path.join(shp_path, 'interstates', 'interstates.shp') diff --git a/django/contrib/gis/tests/utils.py b/django/contrib/gis/tests/utils.py index a83ba8a93f..8355b27fd7 100644 --- a/django/contrib/gis/tests/utils.py +++ b/django/contrib/gis/tests/utils.py @@ -26,7 +26,7 @@ mysql = _default_db == 'mysql' spatialite = _default_db == 'spatialite' HAS_SPATIALREFSYS = True -if oracle: +if oracle and 'gis' in settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE']: from django.contrib.gis.db.backends.oracle.models import SpatialRefSys elif postgis: from django.contrib.gis.db.backends.postgis.models import SpatialRefSys diff --git a/django/contrib/gis/utils/ogrinspect.py b/django/contrib/gis/utils/ogrinspect.py index 1c870eaa30..08d3b40397 100644 --- a/django/contrib/gis/utils/ogrinspect.py +++ b/django/contrib/gis/utils/ogrinspect.py @@ -123,7 +123,7 @@ def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=Non to the given data source. See the `ogrinspect` docstring for more details. 
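The upath() conversions repeated throughout these test hunks follow one pattern, sketched below: wrap __file__ (or a module's __file__) so the path is text on Python 2 as well as Python 3 before it is joined or opened:

import os
from django.utils._os import upath

TEMPLATE_DIR = os.path.join(os.path.dirname(upath(__file__)), 'templates')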
""" # Getting the DataSource - if isinstance(data_source, str): + if isinstance(data_source, six.string_types): data_source = DataSource(data_source) elif isinstance(data_source, DataSource): pass diff --git a/django/contrib/humanize/tests.py b/django/contrib/humanize/tests.py index a0f13d3ee9..c648f544d7 100644 --- a/django/contrib/humanize/tests.py +++ b/django/contrib/humanize/tests.py @@ -1,6 +1,12 @@ from __future__ import unicode_literals import datetime +try: + import pytz +except ImportError: + pytz = None + +from django.conf import settings from django.contrib.humanize.templatetags import humanize from django.template import Template, Context, defaultfilters from django.test import TestCase @@ -10,6 +16,7 @@ from django.utils.timezone import utc from django.utils import translation from django.utils.translation import ugettext as _ from django.utils import tzinfo +from django.utils.unittest import skipIf # Mock out datetime in some tests so they don't fail occasionally when they @@ -141,14 +148,16 @@ class HumanizeTests(TestCase): # As 24h of difference they will never be the same self.assertNotEqual(naturalday_one, naturalday_two) + @skipIf(settings.TIME_ZONE != "America/Chicago" and pytz is None, + "this test requires pytz when a non-default time zone is set") def test_naturalday_uses_localtime(self): # Regression for #18504 - # This is 2012-03-08HT19:30:00-06:00 in Ameria/Chicago + # This is 2012-03-08HT19:30:00-06:00 in America/Chicago dt = datetime.datetime(2012, 3, 9, 1, 30, tzinfo=utc) orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: - with override_settings(USE_TZ=True): + with override_settings(TIME_ZONE="America/Chicago", USE_TZ=True): self.humanize_tester([dt], ['yesterday'], 'naturalday') finally: humanize.datetime = orig_humanize_datetime diff --git a/django/contrib/localflavor/ar/forms.py b/django/contrib/localflavor/ar/forms.py index dc4235f9dd..cc6c833de0 100644 --- a/django/contrib/localflavor/ar/forms.py +++ b/django/contrib/localflavor/ar/forms.py @@ -24,7 +24,9 @@ class ARPostalCodeField(RegexField): """ A field that accepts a 'classic' NNNN Postal Code or a CPA. 
- See http://www.correoargentino.com.ar/consulta_cpa/home.php + See: + http://www.correoargentino.com.ar/cpa/que_es + http://www.correoargentino.com.ar/cpa/como_escribirlo """ default_error_messages = { 'invalid': _("Enter a postal code in the format NNNN or ANNNNAAA."), @@ -120,8 +122,7 @@ class ARCUITField(RegexField): return str(result) def _format(self, cuit, check_digit=None): - if check_digit == None: + if check_digit is None: check_digit = cuit[-1] cuit = cuit[:-1] return '%s-%s-%s' % (cuit[:2], cuit[2:], check_digit) - diff --git a/django/contrib/redirects/tests.py b/django/contrib/redirects/tests.py new file mode 100644 index 0000000000..11ffc7b748 --- /dev/null +++ b/django/contrib/redirects/tests.py @@ -0,0 +1,42 @@ +from django.conf import settings +from django.contrib.sites.models import Site +from django.test import TestCase +from django.test.utils import override_settings +from django.utils import six + +from .models import Redirect + + +@override_settings( + SITE_ID=1, + APPEND_SLASH=True, + MIDDLEWARE_CLASSES=list(settings.MIDDLEWARE_CLASSES) + + ['django.contrib.redirects.middleware.RedirectFallbackMiddleware'], +) +class RedirectTests(TestCase): + + def setUp(self): + self.site = Site.objects.get(pk=settings.SITE_ID) + + def test_model(self): + r1 = Redirect.objects.create( + site=self.site, old_path='/initial', new_path='/new_target') + self.assertEqual(six.text_type(r1), "/initial ---> /new_target") + + def test_redirect_middleware(self): + r1 = Redirect.objects.create( + site=self.site, old_path='/initial', new_path='/new_target') + response = self.client.get('/initial') + self.assertRedirects(response, + '/new_target', status_code=301, target_status_code=404) + # Works also with trailing slash + response = self.client.get('/initial/') + self.assertRedirects(response, + '/new_target', status_code=301, target_status_code=404) + + def test_response_gone(self): + """When the redirect target is '', return a 410""" + r1 = Redirect.objects.create( + site=self.site, old_path='/initial', new_path='') + response = self.client.get('/initial') + self.assertEqual(response.status_code, 410) diff --git a/django/contrib/sessions/backends/base.py b/django/contrib/sessions/backends/base.py index c8393f23c6..f79a264500 100644 --- a/django/contrib/sessions/backends/base.py +++ b/django/contrib/sessions/backends/base.py @@ -1,12 +1,12 @@ from __future__ import unicode_literals import base64 -import time from datetime import datetime, timedelta try: from django.utils.six.moves import cPickle as pickle except ImportError: import pickle +import string from django.conf import settings from django.core.exceptions import SuspiciousOperation @@ -16,6 +16,10 @@ from django.utils.crypto import salted_hmac from django.utils import timezone from django.utils.encoding import force_bytes +# session_key should not be case sensitive because some backends can store it +# on case insensitive file systems. +VALID_KEY_CHARS = string.ascii_lowercase + string.digits + class CreateError(Exception): """ Used internally as a consistent exception type to catch from save (see the @@ -133,12 +137,8 @@ class SessionBase(object): def _get_new_session_key(self): "Returns session key that isn't being used." - # Todo: move to 0-9a-z charset in 1.5 - hex_chars = '1234567890abcdef' - # session_key should not be case sensitive because some backends - # can store it on case insensitive file systems. 
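A settings.py sketch mirroring the new redirects tests above; it assumes django.contrib.sites and django.contrib.redirects are already in INSTALLED_APPS, and only the appended middleware entry comes from the code exercised by the tests:

MIDDLEWARE_CLASSES = (
    # ... the project's existing middleware ...
    'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
)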
while True: - session_key = get_random_string(32, hex_chars) + session_key = get_random_string(32, VALID_KEY_CHARS) if not self.exists(session_key): break return session_key @@ -170,24 +170,52 @@ class SessionBase(object): _session = property(_get_session) - def get_expiry_age(self): - """Get the number of seconds until the session expires.""" - expiry = self.get('_session_expiry') + def get_expiry_age(self, **kwargs): + """Get the number of seconds until the session expires. + + Optionally, this function accepts `modification` and `expiry` keyword + arguments specifying the modification and expiry of the session. + """ + try: + modification = kwargs['modification'] + except KeyError: + modification = timezone.now() + # Make the difference between "expiry=None passed in kwargs" and + # "expiry not passed in kwargs", in order to guarantee not to trigger + # self.load() when expiry is provided. + try: + expiry = kwargs['expiry'] + except KeyError: + expiry = self.get('_session_expiry') + if not expiry: # Checks both None and 0 cases return settings.SESSION_COOKIE_AGE if not isinstance(expiry, datetime): return expiry - delta = expiry - timezone.now() + delta = expiry - modification return delta.days * 86400 + delta.seconds - def get_expiry_date(self): - """Get session the expiry date (as a datetime object).""" - expiry = self.get('_session_expiry') + def get_expiry_date(self, **kwargs): + """Get session the expiry date (as a datetime object). + + Optionally, this function accepts `modification` and `expiry` keyword + arguments specifying the modification and expiry of the session. + """ + try: + modification = kwargs['modification'] + except KeyError: + modification = timezone.now() + # Same comment as in get_expiry_age + try: + expiry = kwargs['expiry'] + except KeyError: + expiry = self.get('_session_expiry') + if isinstance(expiry, datetime): return expiry if not expiry: # Checks both None and 0 cases expiry = settings.SESSION_COOKIE_AGE - return timezone.now() + timedelta(seconds=expiry) + return modification + timedelta(seconds=expiry) def set_expiry(self, value): """ @@ -281,3 +309,14 @@ class SessionBase(object): Loads the session data and returns a dictionary. """ raise NotImplementedError + + @classmethod + def clear_expired(cls): + """ + Remove expired sessions from the session store. + + If this operation isn't possible on a given backend, it should raise + NotImplementedError. If it isn't necessary, because the backend has + a built-in expiration mechanism, it should be a no-op. + """ + raise NotImplementedError diff --git a/django/contrib/sessions/backends/cache.py b/django/contrib/sessions/backends/cache.py index b66123b915..596042fcb3 100644 --- a/django/contrib/sessions/backends/cache.py +++ b/django/contrib/sessions/backends/cache.py @@ -1,5 +1,6 @@ +from django.conf import settings from django.contrib.sessions.backends.base import SessionBase, CreateError -from django.core.cache import cache +from django.core.cache import get_cache from django.utils.six.moves import xrange KEY_PREFIX = "django.contrib.sessions.cache" @@ -10,7 +11,7 @@ class SessionStore(SessionBase): A cache-based session store. 
""" def __init__(self, session_key=None): - self._cache = cache + self._cache = get_cache(settings.SESSION_CACHE_ALIAS) super(SessionStore, self).__init__(session_key) @property @@ -43,7 +44,9 @@ class SessionStore(SessionBase): continue self.modified = True return - raise RuntimeError("Unable to create a new session key.") + raise RuntimeError( + "Unable to create a new session key. " + "It is likely that the cache is unavailable.") def save(self, must_create=False): if must_create: @@ -65,3 +68,7 @@ class SessionStore(SessionBase): return session_key = self.session_key self._cache.delete(KEY_PREFIX + session_key) + + @classmethod + def clear_expired(cls): + pass diff --git a/django/contrib/sessions/backends/cached_db.py b/django/contrib/sessions/backends/cached_db.py index ff6076df77..31c6fbfce3 100644 --- a/django/contrib/sessions/backends/cached_db.py +++ b/django/contrib/sessions/backends/cached_db.py @@ -2,9 +2,10 @@ Cached, database-backed sessions. """ -from django.conf import settings from django.contrib.sessions.backends.db import SessionStore as DBStore from django.core.cache import cache +from django.core.exceptions import SuspiciousOperation +from django.utils import timezone KEY_PREFIX = "django.contrib.sessions.cached_db" @@ -28,9 +29,21 @@ class SessionStore(DBStore): # Some backends (e.g. memcache) raise an exception on invalid # cache keys. If this happens, reset the session. See #17810. data = None + if data is None: - data = super(SessionStore, self).load() - cache.set(self.cache_key, data, settings.SESSION_COOKIE_AGE) + # Duplicate DBStore.load, because we need to keep track + # of the expiry date to set it properly in the cache. + try: + s = Session.objects.get( + session_key=self.session_key, + expire_date__gt=timezone.now() + ) + data = self.decode(s.session_data) + cache.set(self.cache_key, data, + self.get_expiry_age(expiry=s.expire_date)) + except (Session.DoesNotExist, SuspiciousOperation): + self.create() + data = {} return data def exists(self, session_key): @@ -40,7 +53,7 @@ class SessionStore(DBStore): def save(self, must_create=False): super(SessionStore, self).save(must_create) - cache.set(self.cache_key, self._session, settings.SESSION_COOKIE_AGE) + cache.set(self.cache_key, self._session, self.get_expiry_age()) def delete(self, session_key=None): super(SessionStore, self).delete(session_key) @@ -58,3 +71,7 @@ class SessionStore(DBStore): self.clear() self.delete(self.session_key) self.create() + + +# At bottom to avoid circular import +from django.contrib.sessions.models import Session diff --git a/django/contrib/sessions/backends/db.py b/django/contrib/sessions/backends/db.py index babdb72c27..47e89b66e5 100644 --- a/django/contrib/sessions/backends/db.py +++ b/django/contrib/sessions/backends/db.py @@ -14,7 +14,7 @@ class SessionStore(SessionBase): def load(self): try: s = Session.objects.get( - session_key = self.session_key, + session_key=self.session_key, expire_date__gt=timezone.now() ) return self.decode(s.session_data) @@ -71,6 +71,11 @@ class SessionStore(SessionBase): except Session.DoesNotExist: pass + @classmethod + def clear_expired(cls): + Session.objects.filter(expire_date__lt=timezone.now()).delete() + transaction.commit_unless_managed() + # At bottom to avoid circular import from django.contrib.sessions.models import Session diff --git a/django/contrib/sessions/backends/file.py b/django/contrib/sessions/backends/file.py index 20ac2c2087..7d933c678a 100644 --- a/django/contrib/sessions/backends/file.py +++ 
b/django/contrib/sessions/backends/file.py @@ -1,32 +1,40 @@ +import datetime import errno import os import tempfile from django.conf import settings -from django.contrib.sessions.backends.base import SessionBase, CreateError +from django.contrib.sessions.backends.base import SessionBase, CreateError, VALID_KEY_CHARS from django.core.exceptions import SuspiciousOperation, ImproperlyConfigured - +from django.utils import timezone class SessionStore(SessionBase): """ Implements a file based session store. """ def __init__(self, session_key=None): - self.storage_path = getattr(settings, "SESSION_FILE_PATH", None) - if not self.storage_path: - self.storage_path = tempfile.gettempdir() - - # Make sure the storage path is valid. - if not os.path.isdir(self.storage_path): - raise ImproperlyConfigured( - "The session storage path %r doesn't exist. Please set your" - " SESSION_FILE_PATH setting to an existing directory in which" - " Django can store session data." % self.storage_path) - + self.storage_path = type(self)._get_storage_path() self.file_prefix = settings.SESSION_COOKIE_NAME super(SessionStore, self).__init__(session_key) - VALID_KEY_CHARS = set("abcdef0123456789") + @classmethod + def _get_storage_path(cls): + try: + return cls._storage_path + except AttributeError: + storage_path = getattr(settings, "SESSION_FILE_PATH", None) + if not storage_path: + storage_path = tempfile.gettempdir() + + # Make sure the storage path is valid. + if not os.path.isdir(storage_path): + raise ImproperlyConfigured( + "The session storage path %r doesn't exist. Please set your" + " SESSION_FILE_PATH setting to an existing directory in which" + " Django can store session data." % storage_path) + + cls._storage_path = storage_path + return storage_path def _key_to_file(self, session_key=None): """ @@ -38,12 +46,24 @@ class SessionStore(SessionBase): # Make sure we're not vulnerable to directory traversal. Session keys # should always be md5s, so they should never contain directory # components. - if not set(session_key).issubset(self.VALID_KEY_CHARS): + if not set(session_key).issubset(set(VALID_KEY_CHARS)): raise SuspiciousOperation( "Invalid characters in session key") return os.path.join(self.storage_path, self.file_prefix + session_key) + def _last_modification(self): + """ + Return the modification time of the file storing the session's content. + """ + modification = os.stat(self._key_to_file()).st_mtime + if settings.USE_TZ: + modification = datetime.datetime.utcfromtimestamp(modification) + modification = modification.replace(tzinfo=timezone.utc) + else: + modification = datetime.datetime.fromtimestamp(modification) + return modification + def load(self): session_data = {} try: @@ -56,6 +76,15 @@ class SessionStore(SessionBase): session_data = self.decode(file_data) except (EOFError, SuspiciousOperation): self.create() + + # Remove expired sessions. 
+ expiry_age = self.get_expiry_age( + modification=self._last_modification(), + expiry=session_data.get('_session_expiry')) + if expiry_age < 0: + session_data = {} + self.delete() + self.create() except IOError: self.create() return session_data @@ -142,3 +171,19 @@ class SessionStore(SessionBase): def clean(self): pass + + @classmethod + def clear_expired(cls): + storage_path = cls._get_storage_path() + file_prefix = settings.SESSION_COOKIE_NAME + + for session_file in os.listdir(storage_path): + if not session_file.startswith(file_prefix): + continue + session_key = session_file[len(file_prefix):] + session = cls(session_key) + # When an expired session is loaded, its file is removed, and a + # new file is immediately created. Prevent this by disabling + # the create() method. + session.create = lambda: None + session.load() diff --git a/django/contrib/sessions/backends/signed_cookies.py b/django/contrib/sessions/backends/signed_cookies.py index 41ba7af634..c2b7a3123f 100644 --- a/django/contrib/sessions/backends/signed_cookies.py +++ b/django/contrib/sessions/backends/signed_cookies.py @@ -32,6 +32,7 @@ class SessionStore(SessionBase): try: return signing.loads(self.session_key, serializer=PickleSerializer, + # This doesn't handle non-default expiry dates, see #19201 max_age=settings.SESSION_COOKIE_AGE, salt='django.contrib.sessions.backends.signed_cookies') except (signing.BadSignature, ValueError): @@ -91,3 +92,7 @@ class SessionStore(SessionBase): return signing.dumps(session_cache, compress=True, salt='django.contrib.sessions.backends.signed_cookies', serializer=PickleSerializer) + + @classmethod + def clear_expired(cls): + pass diff --git a/tests/modeltests/pagination/__init__.py b/django/contrib/sessions/management/__init__.py similarity index 100% rename from tests/modeltests/pagination/__init__.py rename to django/contrib/sessions/management/__init__.py diff --git a/tests/regressiontests/pagination_regress/__init__.py b/django/contrib/sessions/management/commands/__init__.py similarity index 100% rename from tests/regressiontests/pagination_regress/__init__.py rename to django/contrib/sessions/management/commands/__init__.py diff --git a/django/contrib/sessions/management/commands/clearsessions.py b/django/contrib/sessions/management/commands/clearsessions.py new file mode 100644 index 0000000000..8eb23dfee0 --- /dev/null +++ b/django/contrib/sessions/management/commands/clearsessions.py @@ -0,0 +1,15 @@ +from django.conf import settings +from django.core.management.base import NoArgsCommand +from django.utils.importlib import import_module + + +class Command(NoArgsCommand): + help = "Can be run as a cronjob or directly to clean out expired sessions (only with the database backend at the moment)." 
+ + def handle_noargs(self, **options): + engine = import_module(settings.SESSION_ENGINE) + try: + engine.SessionStore.clear_expired() + except NotImplementedError: + self.stderr.write("Session engine '%s' doesn't support clearing " + "expired sessions.\n" % settings.SESSION_ENGINE) diff --git a/django/contrib/sessions/tests.py b/django/contrib/sessions/tests.py index fc2d8753d7..da79ac9de6 100644 --- a/django/contrib/sessions/tests.py +++ b/django/contrib/sessions/tests.py @@ -1,4 +1,5 @@ from datetime import timedelta +import os import shutil import string import tempfile @@ -12,7 +13,8 @@ from django.contrib.sessions.backends.file import SessionStore as FileSession from django.contrib.sessions.backends.signed_cookies import SessionStore as CookieSession from django.contrib.sessions.models import Session from django.contrib.sessions.middleware import SessionMiddleware -from django.core.cache import DEFAULT_CACHE_ALIAS +from django.core.cache import get_cache +from django.core import management from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation from django.http import HttpResponse from django.test import TestCase, RequestFactory @@ -83,7 +85,7 @@ class SessionTestsMixin(object): self.session['some key'] = 1 self.session.modified = False self.session.accessed = False - self.assertTrue('some key' in self.session) + self.assertIn('some key', self.session) self.assertTrue(self.session.accessed) self.assertFalse(self.session.modified) @@ -134,8 +136,8 @@ class SessionTestsMixin(object): self.assertTrue(self.session.modified) def test_save(self): - if (hasattr(self.session, '_cache') and - 'DummyCache' in settings.CACHES[DEFAULT_CACHE_ALIAS]['BACKEND']): + if (hasattr(self.session, '_cache') and'DummyCache' in + settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND']): raise unittest.SkipTest("Session saving tests require a real cache backend") self.session.save() self.assertTrue(self.session.exists(self.session.session_key)) @@ -197,31 +199,43 @@ class SessionTestsMixin(object): self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) def test_custom_expiry_seconds(self): - # Using seconds - self.session.set_expiry(10) - delta = self.session.get_expiry_date() - timezone.now() - self.assertTrue(delta.seconds in (9, 10)) + modification = timezone.now() - age = self.session.get_expiry_age() - self.assertTrue(age in (9, 10)) + self.session.set_expiry(10) + + date = self.session.get_expiry_date(modification=modification) + self.assertEqual(date, modification + timedelta(seconds=10)) + + age = self.session.get_expiry_age(modification=modification) + self.assertEqual(age, 10) def test_custom_expiry_timedelta(self): - # Using timedelta - self.session.set_expiry(timedelta(seconds=10)) - delta = self.session.get_expiry_date() - timezone.now() - self.assertTrue(delta.seconds in (9, 10)) + modification = timezone.now() - age = self.session.get_expiry_age() - self.assertTrue(age in (9, 10)) + # Mock timezone.now, because set_expiry calls it on this code path. 
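Illustrative ways to run the new clearsessions command; the cron schedule and settings module are placeholders, and the programmatic call matches what the new tests below do:

# crontab entry (illustrative):
#   0 4 * * * django-admin.py clearsessions --settings=mysite.settings
from django.core import management
management.call_command('clearsessions')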
+ original_now = timezone.now + try: + timezone.now = lambda: modification + self.session.set_expiry(timedelta(seconds=10)) + finally: + timezone.now = original_now + + date = self.session.get_expiry_date(modification=modification) + self.assertEqual(date, modification + timedelta(seconds=10)) + + age = self.session.get_expiry_age(modification=modification) + self.assertEqual(age, 10) def test_custom_expiry_datetime(self): - # Using fixed datetime - self.session.set_expiry(timezone.now() + timedelta(seconds=10)) - delta = self.session.get_expiry_date() - timezone.now() - self.assertTrue(delta.seconds in (9, 10)) + modification = timezone.now() - age = self.session.get_expiry_age() - self.assertTrue(age in (9, 10)) + self.session.set_expiry(modification + timedelta(seconds=10)) + + date = self.session.get_expiry_date(modification=modification) + self.assertEqual(date, modification + timedelta(seconds=10)) + + age = self.session.get_expiry_age(modification=modification) + self.assertEqual(age, 10) def test_custom_expiry_reset(self): self.session.set_expiry(None) @@ -258,6 +272,23 @@ class SessionTestsMixin(object): encoded = self.session.encode(data) self.assertEqual(self.session.decode(encoded), data) + def test_actual_expiry(self): + # Regression test for #19200 + old_session_key = None + new_session_key = None + try: + self.session['foo'] = 'bar' + self.session.set_expiry(-timedelta(seconds=10)) + self.session.save() + old_session_key = self.session.session_key + # With an expiry date in the past, the session expires instantly. + new_session = self.backend(self.session.session_key) + new_session_key = new_session.session_key + self.assertNotIn('foo', new_session) + finally: + self.session.delete(old_session_key) + self.session.delete(new_session_key) + class DatabaseSessionTests(SessionTestsMixin, TestCase): @@ -290,6 +321,30 @@ class DatabaseSessionTests(SessionTestsMixin, TestCase): del self.session._session_cache self.assertEqual(self.session['y'], 2) + @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.db") + def test_clearsessions_command(self): + """ + Test clearsessions command for clearing expired sessions. + """ + self.assertEqual(0, Session.objects.count()) + + # One object in the future + self.session['foo'] = 'bar' + self.session.set_expiry(3600) + self.session.save() + + # One object in the past + other_session = self.backend() + other_session['foo'] = 'bar' + other_session.set_expiry(-3600) + other_session.save() + + # Two sessions are in the database before clearsessions... + self.assertEqual(2, Session.objects.count()) + management.call_command('clearsessions') + # ... and one is deleted. + self.assertEqual(1, Session.objects.count()) + @override_settings(USE_TZ=True) class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests): @@ -300,7 +355,8 @@ class CacheDBSessionTests(SessionTestsMixin, TestCase): backend = CacheDBSession - @unittest.skipIf('DummyCache' in settings.CACHES[DEFAULT_CACHE_ALIAS]['BACKEND'], + @unittest.skipIf('DummyCache' in + settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND'], "Session saving tests require a real cache backend") def test_exists_searches_cache_first(self): self.session.save() @@ -326,19 +382,23 @@ class FileSessionTests(SessionTestsMixin, unittest.TestCase): backend = FileSession def setUp(self): - super(FileSessionTests, self).setUp() # Do file session tests in an isolated directory, and kill it after we're done. 
self.original_session_file_path = settings.SESSION_FILE_PATH self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp() + # Reset the file session backend's internal caches + if hasattr(self.backend, '_storage_path'): + del self.backend._storage_path + super(FileSessionTests, self).setUp() def tearDown(self): + super(FileSessionTests, self).tearDown() settings.SESSION_FILE_PATH = self.original_session_file_path shutil.rmtree(self.temp_session_store) - super(FileSessionTests, self).tearDown() @override_settings( SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer") def test_configuration_check(self): + del self.backend._storage_path # Make sure the file backend checks for a good storage dir self.assertRaises(ImproperlyConfigured, self.backend) @@ -352,6 +412,37 @@ class FileSessionTests(SessionTestsMixin, unittest.TestCase): self.assertRaises(SuspiciousOperation, self.backend("a/b/c").load) + @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file") + def test_clearsessions_command(self): + """ + Test clearsessions command for clearing expired sessions. + """ + storage_path = self.backend._get_storage_path() + file_prefix = settings.SESSION_COOKIE_NAME + + def count_sessions(): + return len([session_file for session_file in os.listdir(storage_path) + if session_file.startswith(file_prefix)]) + + self.assertEqual(0, count_sessions()) + + # One object in the future + self.session['foo'] = 'bar' + self.session.set_expiry(3600) + self.session.save() + + # One object in the past + other_session = self.backend() + other_session['foo'] = 'bar' + other_session.set_expiry(-3600) + other_session.save() + + # Two sessions are in the filesystem before clearsessions... + self.assertEqual(2, count_sessions()) + management.call_command('clearsessions') + # ... and one is deleted. + self.assertEqual(1, count_sessions()) + class CacheSessionTests(SessionTestsMixin, unittest.TestCase): @@ -364,6 +455,23 @@ class CacheSessionTests(SessionTestsMixin, unittest.TestCase): self.session._session_key = (string.ascii_letters + string.digits) * 20 self.assertEqual(self.session.load(), {}) + def test_default_cache(self): + self.session.save() + self.assertNotEqual(get_cache('default').get(self.session.cache_key), None) + + @override_settings(CACHES={ + 'default': { + 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + }, + 'sessions': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, + }, SESSION_CACHE_ALIAS='sessions') + def test_non_default_cache(self): + self.session.save() + self.assertEqual(get_cache('default').get(self.session.cache_key), None) + self.assertNotEqual(get_cache('sessions').get(self.session.cache_key), None) + class SessionMiddlewareTests(unittest.TestCase): @@ -452,3 +560,8 @@ class CookieSessionTests(SessionTestsMixin, TestCase): testing for this behavior is meaningless. 
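A settings.py sketch matching test_non_default_cache above: session data is routed to a dedicated cache alias instead of 'default'. The backend choices are illustrative:

SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = 'sessions'
CACHES = {
    'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
    'sessions': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
}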
""" pass + + @unittest.expectedFailure + def test_actual_expiry(self): + # The cookie backend doesn't handle non-default expiry dates, see #19201 + super(CookieSessionTests, self).test_actual_expiry() diff --git a/django/contrib/sitemaps/tests/http.py b/django/contrib/sitemaps/tests/http.py index 99042fef03..4a1cf66b17 100644 --- a/django/contrib/sitemaps/tests/http.py +++ b/django/contrib/sitemaps/tests/http.py @@ -11,6 +11,7 @@ from django.core.exceptions import ImproperlyConfigured from django.test.utils import override_settings from django.utils.unittest import skipUnless from django.utils.formats import localize +from django.utils._os import upath from django.utils.translation import activate, deactivate from .base import SitemapTestsBase @@ -29,7 +30,7 @@ class HTTPSitemapTests(SitemapTestsBase): self.assertXMLEqual(response.content.decode('utf-8'), expected_content) @override_settings( - TEMPLATE_DIRS=(os.path.join(os.path.dirname(__file__), 'templates'),) + TEMPLATE_DIRS=(os.path.join(os.path.dirname(upath(__file__)), 'templates'),) ) def test_simple_sitemap_custom_index(self): "A simple sitemap index can be rendered with a custom template" @@ -64,7 +65,7 @@ class HTTPSitemapTests(SitemapTestsBase): self.assertXMLEqual(response.content.decode('utf-8'), expected_content) @override_settings( - TEMPLATE_DIRS=(os.path.join(os.path.dirname(__file__), 'templates'),) + TEMPLATE_DIRS=(os.path.join(os.path.dirname(upath(__file__)), 'templates'),) ) def test_simple_custom_sitemap(self): "A simple sitemap can be rendered with a custom template" diff --git a/django/contrib/staticfiles/storage.py b/django/contrib/staticfiles/storage.py index 9691b7849d..7e87a89f5b 100644 --- a/django/contrib/staticfiles/storage.py +++ b/django/contrib/staticfiles/storage.py @@ -19,6 +19,7 @@ from django.utils.datastructures import SortedDict from django.utils.encoding import force_bytes, force_text from django.utils.functional import LazyObject from django.utils.importlib import import_module +from django.utils._os import upath from django.contrib.staticfiles.utils import check_settings, matches_patterns @@ -296,7 +297,7 @@ class AppStaticStorage(FileSystemStorage): """ # app is the actual app module mod = import_module(app) - mod_path = os.path.dirname(mod.__file__) + mod_path = os.path.dirname(upath(mod.__file__)) location = os.path.join(mod_path, self.source_dir) super(AppStaticStorage, self).__init__(location, *args, **kwargs) diff --git a/django/core/cache/__init__.py b/django/core/cache/__init__.py index f496c35e2b..562bcc21bb 100644 --- a/django/core/cache/__init__.py +++ b/django/core/cache/__init__.py @@ -93,8 +93,6 @@ def parse_backend_conf(backend, **kwargs): raise InvalidCacheBackendError("Could not find backend '%s'" % backend) location = kwargs.pop('LOCATION', '') return backend, location, kwargs - raise InvalidCacheBackendError( - "Couldn't find a cache backend named '%s'" % backend) def get_cache(backend, **kwargs): """ @@ -136,11 +134,9 @@ def get_cache(backend, **kwargs): "Could not find backend '%s': %s" % (backend, e)) cache = backend_cls(location, params) # Some caches -- python-memcached in particular -- need to do a cleanup at the - # end of a request cycle. If the cache provides a close() method, wire it up - # here. - if hasattr(cache, 'close'): - signals.request_finished.connect(cache.close) + # end of a request cycle. 
If not implemented in a particular backend + # cache.close is a no-op + signals.request_finished.connect(cache.close) return cache cache = get_cache(DEFAULT_CACHE_ALIAS) - diff --git a/django/core/cache/backends/base.py b/django/core/cache/backends/base.py index 06e8952bfb..7234d3c4db 100644 --- a/django/core/cache/backends/base.py +++ b/django/core/cache/backends/base.py @@ -6,15 +6,19 @@ import warnings from django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning from django.utils.importlib import import_module + class InvalidCacheBackendError(ImproperlyConfigured): pass + class CacheKeyWarning(DjangoRuntimeWarning): pass + # Memcached does not accept keys longer than this. MEMCACHE_MAX_KEY_LENGTH = 250 + def default_key_func(key, key_prefix, version): """ Default function to generate keys. @@ -23,7 +27,8 @@ def default_key_func(key, key_prefix, version): the `key_prefix'. KEY_FUNCTION can be used to specify an alternate function with custom key making behavior. """ - return ':'.join([key_prefix, str(version), key]) + return '%s:%s:%s' % (key_prefix, version, key) + def get_key_func(key_func): """ @@ -40,6 +45,7 @@ def get_key_func(key_func): return getattr(key_func_module, key_func_name) return default_key_func + class BaseCache(object): def __init__(self, params): timeout = params.get('timeout', params.get('TIMEOUT', 300)) @@ -221,3 +227,7 @@ class BaseCache(object): the new version. """ return self.incr_version(key, -delta, version) + + def close(self, **kwargs): + """Close the cache connection""" + pass diff --git a/django/core/cache/backends/db.py b/django/core/cache/backends/db.py index 348b03f733..c93bc90b18 100644 --- a/django/core/cache/backends/db.py +++ b/django/core/cache/backends/db.py @@ -11,7 +11,7 @@ except ImportError: from django.conf import settings from django.core.cache.backends.base import BaseCache from django.db import connections, router, transaction, DatabaseError -from django.utils import timezone +from django.utils import timezone, six from django.utils.encoding import force_bytes @@ -104,7 +104,11 @@ class DatabaseCache(BaseDatabaseCache): if num > self._max_entries: self._cull(db, cursor, now) pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) - encoded = base64.b64encode(pickled).strip() + b64encoded = base64.b64encode(pickled) + # The DB column is expecting a string, so make sure the value is a + # string, not bytes. Refs #19274. 
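# A minimal standalone sketch, not part of the patch, of the str-vs-bytes
# handling this hunk introduces: the pickled cache value is base64-encoded, and
# on Python 3 the resulting bytes are decoded with latin-1 so that the database
# adapter receives text for the value column.
import base64
import pickle
import sys

value = {'answer': 42}
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
b64encoded = base64.b64encode(pickled)
if sys.version_info[0] >= 3:
    b64encoded = b64encoded.decode('latin1')   # text, not bytes, for the DB column

# Reading the value back simply reverses the steps.
raw = b64encoded.encode('latin1') if isinstance(b64encoded, str) else b64encoded
assert pickle.loads(base64.b64decode(raw)) == value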
+ if six.PY3: + b64encoded = b64encoded.decode('latin1') cursor.execute("SELECT cache_key, expires FROM %s " "WHERE cache_key = %%s" % table, [key]) try: @@ -113,11 +117,11 @@ class DatabaseCache(BaseDatabaseCache): (mode == 'add' and result[1] < now)): cursor.execute("UPDATE %s SET value = %%s, expires = %%s " "WHERE cache_key = %%s" % table, - [encoded, connections[db].ops.value_to_db_datetime(exp), key]) + [b64encoded, connections[db].ops.value_to_db_datetime(exp), key]) else: cursor.execute("INSERT INTO %s (cache_key, value, expires) " "VALUES (%%s, %%s, %%s)" % table, - [key, encoded, connections[db].ops.value_to_db_datetime(exp)]) + [key, b64encoded, connections[db].ops.value_to_db_datetime(exp)]) except DatabaseError: # To be threadsafe, updates/inserts are allowed to fail silently transaction.rollback_unless_managed(using=db) diff --git a/django/core/cache/backends/memcached.py b/django/core/cache/backends/memcached.py index 9bb47c8344..2c3f198847 100644 --- a/django/core/cache/backends/memcached.py +++ b/django/core/cache/backends/memcached.py @@ -90,6 +90,9 @@ class BaseMemcachedCache(BaseCache): def incr(self, key, delta=1, version=None): key = self.make_key(key, version=version) + # memcached doesn't support a negative delta + if delta < 0: + return self._cache.decr(key, -delta) try: val = self._cache.incr(key, delta) @@ -105,6 +108,9 @@ class BaseMemcachedCache(BaseCache): def decr(self, key, delta=1, version=None): key = self.make_key(key, version=version) + # memcached doesn't support a negative delta + if delta < 0: + return self._cache.incr(key, -delta) try: val = self._cache.decr(key, delta) diff --git a/django/core/files/base.py b/django/core/files/base.py index b81e180292..2d10100019 100644 --- a/django/core/files/base.py +++ b/django/core/files/base.py @@ -6,7 +6,7 @@ from io import BytesIO, StringIO, UnsupportedOperation from django.utils.encoding import smart_text from django.core.files.utils import FileProxyMixin from django.utils import six -from django.utils.encoding import python_2_unicode_compatible +from django.utils.encoding import force_bytes, python_2_unicode_compatible @python_2_unicode_compatible class File(FileProxyMixin): @@ -28,7 +28,9 @@ class File(FileProxyMixin): def __bool__(self): return bool(self.name) - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) def __len__(self): return self.size @@ -132,8 +134,11 @@ class ContentFile(File): A File-like object that takes just raw content, rather than an actual file. """ def __init__(self, content, name=None): - content = content or b'' - stream_class = StringIO if isinstance(content, six.text_type) else BytesIO + if six.PY3: + stream_class = StringIO if isinstance(content, six.text_type) else BytesIO + else: + stream_class = BytesIO + content = force_bytes(content) super(ContentFile, self).__init__(stream_class(content), name=name) self.size = len(content) @@ -142,7 +147,9 @@ class ContentFile(File): def __bool__(self): return True - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) def open(self, mode=None): self.seek(0) diff --git a/django/core/handlers/base.py b/django/core/handlers/base.py index 23572465cf..0caf6b29fa 100644 --- a/django/core/handlers/base.py +++ b/django/core/handlers/base.py @@ -134,7 +134,7 @@ class BaseHandler(object): raise ValueError("The view %s.%s didn't return an HttpResponse object." 
% (callback.__module__, view_name)) # If the response supports deferred rendering, apply template - # response middleware and the render the response + # response middleware and then render the response if hasattr(response, 'render') and callable(response.render): for middleware_method in self._template_response_middleware: response = middleware_method(request, response) diff --git a/django/core/handlers/wsgi.py b/django/core/handlers/wsgi.py index 45cb2268ed..4c0710549a 100644 --- a/django/core/handlers/wsgi.py +++ b/django/core/handlers/wsgi.py @@ -1,5 +1,6 @@ from __future__ import unicode_literals +import codecs import logging import sys from io import BytesIO @@ -144,6 +145,14 @@ class WSGIRequest(http.HttpRequest): self.META['PATH_INFO'] = path_info self.META['SCRIPT_NAME'] = script_name self.method = environ['REQUEST_METHOD'].upper() + _, content_params = self._parse_content_type(self.META.get('CONTENT_TYPE', '')) + if 'charset' in content_params: + try: + codecs.lookup(content_params['charset']) + except LookupError: + pass + else: + self.encoding = content_params['charset'] self._post_parse_error = False try: content_length = int(self.environ.get('CONTENT_LENGTH')) @@ -155,6 +164,21 @@ class WSGIRequest(http.HttpRequest): def _is_secure(self): return 'wsgi.url_scheme' in self.environ and self.environ['wsgi.url_scheme'] == 'https' + def _parse_content_type(self, ctype): + """ + Media Types parsing according to RFC 2616, section 3.7. + + Returns the data type and parameters. For example: + Input: "text/plain; charset=iso-8859-1" + Output: ('text/plain', {'charset': 'iso-8859-1'}) + """ + content_type, _, params = ctype.partition(';') + content_params = {} + for parameter in params.split(';'): + k, _, v = parameter.strip().partition('=') + content_params[k] = v + return content_type, content_params + def _get_request(self): if not hasattr(self, '_request'): self._request = datastructures.MergeDict(self.POST, self.GET) diff --git a/django/core/mail/backends/smtp.py b/django/core/mail/backends/smtp.py index 18437c6282..b6f7f560ed 100644 --- a/django/core/mail/backends/smtp.py +++ b/django/core/mail/backends/smtp.py @@ -7,6 +7,7 @@ from django.conf import settings from django.core.mail.backends.base import BaseEmailBackend from django.core.mail.utils import DNS_NAME from django.core.mail.message import sanitize_address +from django.utils.encoding import force_bytes class EmailBackend(BaseEmailBackend): @@ -102,9 +103,11 @@ class EmailBackend(BaseEmailBackend): from_email = sanitize_address(email_message.from_email, email_message.encoding) recipients = [sanitize_address(addr, email_message.encoding) for addr in email_message.recipients()] + message = email_message.message() + charset = message.get_charset().get_output_charset() if message.get_charset() else 'utf-8' try: self.connection.sendmail(from_email, recipients, - email_message.message().as_string()) + force_bytes(message.as_string(), charset)) except: if not self.fail_silently: raise diff --git a/django/core/management/__init__.py b/django/core/management/__init__.py index c61ab2b663..fab5059376 100644 --- a/django/core/management/__init__.py +++ b/django/core/management/__init__.py @@ -9,6 +9,7 @@ from django.core.exceptions import ImproperlyConfigured from django.core.management.base import BaseCommand, CommandError, handle_default_options from django.core.management.color import color_style from django.utils.importlib import import_module +from django.utils._os import upath from django.utils import six # For backwards 
compatibility: get_version() used to be in this module. @@ -136,14 +137,15 @@ def call_command(name, *args, **options): # Load the command object. try: app_name = get_commands()[name] - if isinstance(app_name, BaseCommand): - # If the command is already loaded, use it directly. - klass = app_name - else: - klass = load_command_class(app_name, name) except KeyError: raise CommandError("Unknown command: %r" % name) + if isinstance(app_name, BaseCommand): + # If the command is already loaded, use it directly. + klass = app_name + else: + klass = load_command_class(app_name, name) + # Grab out a list of defaults from the options. optparse does this for us # when the script runs from the command line, but since call_command can # be called programatically, we need to simulate the loading and handling @@ -409,10 +411,10 @@ def setup_environ(settings_mod, original_settings_path=None): # Add this project to sys.path so that it's importable in the conventional # way. For example, if this file (manage.py) lives in a directory # "myproject", this code would add "/path/to/myproject" to sys.path. - if '__init__.py' in settings_mod.__file__: - p = os.path.dirname(settings_mod.__file__) + if '__init__.py' in upath(settings_mod.__file__): + p = os.path.dirname(upath(settings_mod.__file__)) else: - p = settings_mod.__file__ + p = upath(settings_mod.__file__) project_directory, settings_filename = os.path.split(p) if project_directory == os.curdir or not project_directory: project_directory = os.getcwd() diff --git a/django/core/management/commands/cleanup.py b/django/core/management/commands/cleanup.py index e19d1649be..f83c64be8f 100644 --- a/django/core/management/commands/cleanup.py +++ b/django/core/management/commands/cleanup.py @@ -1,11 +1,11 @@ -from django.core.management.base import NoArgsCommand -from django.utils import timezone +import warnings -class Command(NoArgsCommand): - help = "Can be run as a cronjob or directly to clean out old data from the database (only expired sessions at the moment)." +from django.contrib.sessions.management.commands import clearsessions + +class Command(clearsessions.Command): def handle_noargs(self, **options): - from django.db import transaction - from django.contrib.sessions.models import Session - Session.objects.filter(expire_date__lt=timezone.now()).delete() - transaction.commit_unless_managed() + warnings.warn( + "The `cleanup` command has been deprecated in favor of `clearsessions`.", + PendingDeprecationWarning) + super(Command, self).handle_noargs(**options) diff --git a/django/core/management/commands/compilemessages.py b/django/core/management/commands/compilemessages.py index b7392b9173..e1d8a33332 100644 --- a/django/core/management/commands/compilemessages.py +++ b/django/core/management/commands/compilemessages.py @@ -5,6 +5,7 @@ import os import sys from optparse import make_option from django.core.management.base import BaseCommand, CommandError +from django.utils._os import npath def has_bom(fn): with open(fn, 'rb') as f: @@ -41,8 +42,8 @@ def compile_messages(stderr, locale=None): # command, so that we can take advantage of shell quoting, to # quote any malicious characters/escaping. 
# See http://cyberelk.net/tim/articles/cmdline/ar01s02.html - os.environ['djangocompilemo'] = pf + '.mo' - os.environ['djangocompilepo'] = pf + '.po' + os.environ['djangocompilemo'] = npath(pf + '.mo') + os.environ['djangocompilepo'] = npath(pf + '.po') if sys.platform == 'win32': # Different shell-variable syntax cmd = 'msgfmt --check-format -o "%djangocompilemo%" "%djangocompilepo%"' else: diff --git a/django/core/management/commands/loaddata.py b/django/core/management/commands/loaddata.py index 32ae8abf5a..ed47b8fbf1 100644 --- a/django/core/management/commands/loaddata.py +++ b/django/core/management/commands/loaddata.py @@ -1,11 +1,9 @@ from __future__ import unicode_literals -import sys import os import gzip import zipfile from optparse import make_option -import traceback from django.conf import settings from django.core import serializers @@ -15,6 +13,7 @@ from django.db import (connections, router, transaction, DEFAULT_DB_ALIAS, IntegrityError, DatabaseError) from django.db.models import get_apps from django.utils.encoding import force_text +from django.utils._os import upath from itertools import product try: @@ -39,10 +38,10 @@ class Command(BaseCommand): def handle(self, *fixture_labels, **options): - ignore = options.get('ignore') - using = options.get('database') + self.ignore = options.get('ignore') + self.using = options.get('database') - connection = connections[using] + connection = connections[self.using] if not len(fixture_labels): raise CommandError( @@ -50,8 +49,7 @@ class Command(BaseCommand): "least one fixture in the command line." ) - verbosity = int(options.get('verbosity')) - show_traceback = options.get('traceback') + self.verbosity = int(options.get('verbosity')) # commit is a stealth option - it isn't really useful as # a command line option, but it can be useful when invoking @@ -62,12 +60,10 @@ class Command(BaseCommand): commit = options.get('commit', True) # Keep a count of the installed objects and fixtures - fixture_count = 0 - loaded_object_count = 0 - fixture_object_count = 0 - models = set() - - humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path' + self.fixture_count = 0 + self.loaded_object_count = 0 + self.fixture_object_count = 0 + self.models = set() # Get a cursor (even though we don't need one yet). This has # the side effect of initializing the test database (if @@ -77,9 +73,9 @@ class Command(BaseCommand): # Start transaction management. All fixtures are installed in a # single transaction to ensure that all references are resolved. 
if commit: - transaction.commit_unless_managed(using=using) - transaction.enter_transaction_management(using=using) - transaction.managed(True, using=using) + transaction.commit_unless_managed(using=self.using) + transaction.enter_transaction_management(using=self.using) + transaction.managed(True, using=self.using) class SingleZipReader(zipfile.ZipFile): def __init__(self, *args, **kwargs): @@ -89,136 +85,34 @@ class Command(BaseCommand): def read(self): return zipfile.ZipFile.read(self, self.namelist()[0]) - compression_types = { + self.compression_types = { None: open, 'gz': gzip.GzipFile, 'zip': SingleZipReader } if has_bz2: - compression_types['bz2'] = bz2.BZ2File + self.compression_types['bz2'] = bz2.BZ2File app_module_paths = [] for app in get_apps(): if hasattr(app, '__path__'): # It's a 'models/' subpackage for path in app.__path__: - app_module_paths.append(path) + app_module_paths.append(upath(path)) else: # It's a models.py module - app_module_paths.append(app.__file__) + app_module_paths.append(upath(app.__file__)) app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths] try: with connection.constraint_checks_disabled(): for fixture_label in fixture_labels: - parts = fixture_label.split('.') - - if len(parts) > 1 and parts[-1] in compression_types: - compression_formats = [parts[-1]] - parts = parts[:-1] - else: - compression_formats = compression_types.keys() - - if len(parts) == 1: - fixture_name = parts[0] - formats = serializers.get_public_serializer_formats() - else: - fixture_name, format = '.'.join(parts[:-1]), parts[-1] - if format in serializers.get_public_serializer_formats(): - formats = [format] - else: - formats = [] - - if formats: - if verbosity >= 2: - self.stdout.write("Loading '%s' fixtures..." % fixture_name) - else: - raise CommandError( - "Problem installing fixture '%s': %s is not a known serialization format." % - (fixture_name, format)) - - if os.path.isabs(fixture_name): - fixture_dirs = [fixture_name] - else: - fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + [''] - - for fixture_dir in fixture_dirs: - if verbosity >= 2: - self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir)) - - label_found = False - for combo in product([using, None], formats, compression_formats): - database, format, compression_format = combo - file_name = '.'.join( - p for p in [ - fixture_name, database, format, compression_format - ] - if p - ) - - if verbosity >= 3: - self.stdout.write("Trying %s for %s fixture '%s'..." % \ - (humanize(fixture_dir), file_name, fixture_name)) - full_path = os.path.join(fixture_dir, file_name) - open_method = compression_types[compression_format] - try: - fixture = open_method(full_path, 'r') - except IOError: - if verbosity >= 2: - self.stdout.write("No %s fixture '%s' in %s." % \ - (format, fixture_name, humanize(fixture_dir))) - else: - try: - if label_found: - raise CommandError("Multiple fixtures named '%s' in %s. Aborting." % - (fixture_name, humanize(fixture_dir))) - - fixture_count += 1 - objects_in_fixture = 0 - loaded_objects_in_fixture = 0 - if verbosity >= 2: - self.stdout.write("Installing %s fixture '%s' from %s." 
% \ - (format, fixture_name, humanize(fixture_dir))) - - objects = serializers.deserialize(format, fixture, using=using, ignorenonexistent=ignore) - - for obj in objects: - objects_in_fixture += 1 - if router.allow_syncdb(using, obj.object.__class__): - loaded_objects_in_fixture += 1 - models.add(obj.object.__class__) - try: - obj.save(using=using) - except (DatabaseError, IntegrityError) as e: - e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % { - 'app_label': obj.object._meta.app_label, - 'object_name': obj.object._meta.object_name, - 'pk': obj.object.pk, - 'error_msg': force_text(e) - },) - raise - - loaded_object_count += loaded_objects_in_fixture - fixture_object_count += objects_in_fixture - label_found = True - except Exception as e: - if not isinstance(e, CommandError): - e.args = ("Problem installing fixture '%s': %s" % (full_path, e),) - raise - finally: - fixture.close() - - # If the fixture we loaded contains 0 objects, assume that an - # error was encountered during fixture loading. - if objects_in_fixture == 0: - raise CommandError( - "No fixture data found for '%s'. (File format may be invalid.)" % - (fixture_name)) + self.load_label(fixture_label, app_fixtures) # Since we disabled constraint checks, we must manually check for # any invalid keys that might have been added - table_names = [model._meta.db_table for model in models] + table_names = [model._meta.db_table for model in self.models] try: connection.check_constraints(table_names=table_names) except Exception as e: @@ -229,31 +123,31 @@ class Command(BaseCommand): raise except Exception as e: if commit: - transaction.rollback(using=using) - transaction.leave_transaction_management(using=using) + transaction.rollback(using=self.using) + transaction.leave_transaction_management(using=self.using) raise # If we found even one object in a fixture, we need to reset the # database sequences. - if loaded_object_count > 0: - sequence_sql = connection.ops.sequence_reset_sql(no_style(), models) + if self.loaded_object_count > 0: + sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models) if sequence_sql: - if verbosity >= 2: + if self.verbosity >= 2: self.stdout.write("Resetting sequences\n") for line in sequence_sql: cursor.execute(line) if commit: - transaction.commit(using=using) - transaction.leave_transaction_management(using=using) + transaction.commit(using=self.using) + transaction.leave_transaction_management(using=self.using) - if verbosity >= 1: - if fixture_object_count == loaded_object_count: + if self.verbosity >= 1: + if self.fixture_object_count == self.loaded_object_count: self.stdout.write("Installed %d object(s) from %d fixture(s)" % ( - loaded_object_count, fixture_count)) + self.loaded_object_count, self.fixture_count)) else: self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % ( - loaded_object_count, fixture_object_count, fixture_count)) + self.loaded_object_count, self.fixture_object_count, self.fixture_count)) # Close the DB connection. This is required as a workaround for an # edge case in MySQL: if the same connection is used to @@ -261,3 +155,117 @@ class Command(BaseCommand): # incorrect results. See Django #7572, MySQL #37735. 
if commit: connection.close() + + def load_label(self, fixture_label, app_fixtures): + + parts = fixture_label.split('.') + + if len(parts) > 1 and parts[-1] in self.compression_types: + compression_formats = [parts[-1]] + parts = parts[:-1] + else: + compression_formats = self.compression_types.keys() + + if len(parts) == 1: + fixture_name = parts[0] + formats = serializers.get_public_serializer_formats() + else: + fixture_name, format = '.'.join(parts[:-1]), parts[-1] + if format in serializers.get_public_serializer_formats(): + formats = [format] + else: + formats = [] + + if formats: + if self.verbosity >= 2: + self.stdout.write("Loading '%s' fixtures..." % fixture_name) + else: + raise CommandError( + "Problem installing fixture '%s': %s is not a known serialization format." % + (fixture_name, format)) + + if os.path.isabs(fixture_name): + fixture_dirs = [fixture_name] + else: + fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + [''] + + for fixture_dir in fixture_dirs: + self.process_dir(fixture_dir, fixture_name, compression_formats, + formats) + + def process_dir(self, fixture_dir, fixture_name, compression_formats, + serialization_formats): + + humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path' + + if self.verbosity >= 2: + self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir)) + + label_found = False + for combo in product([self.using, None], serialization_formats, compression_formats): + database, format, compression_format = combo + file_name = '.'.join( + p for p in [ + fixture_name, database, format, compression_format + ] + if p + ) + + if self.verbosity >= 3: + self.stdout.write("Trying %s for %s fixture '%s'..." % \ + (humanize(fixture_dir), file_name, fixture_name)) + full_path = os.path.join(fixture_dir, file_name) + open_method = self.compression_types[compression_format] + try: + fixture = open_method(full_path, 'r') + except IOError: + if self.verbosity >= 2: + self.stdout.write("No %s fixture '%s' in %s." % \ + (format, fixture_name, humanize(fixture_dir))) + else: + try: + if label_found: + raise CommandError("Multiple fixtures named '%s' in %s. Aborting." % + (fixture_name, humanize(fixture_dir))) + + self.fixture_count += 1 + objects_in_fixture = 0 + loaded_objects_in_fixture = 0 + if self.verbosity >= 2: + self.stdout.write("Installing %s fixture '%s' from %s." % \ + (format, fixture_name, humanize(fixture_dir))) + + objects = serializers.deserialize(format, fixture, using=self.using, ignorenonexistent=self.ignore) + + for obj in objects: + objects_in_fixture += 1 + if router.allow_syncdb(self.using, obj.object.__class__): + loaded_objects_in_fixture += 1 + self.models.add(obj.object.__class__) + try: + obj.save(using=self.using) + except (DatabaseError, IntegrityError) as e: + e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % { + 'app_label': obj.object._meta.app_label, + 'object_name': obj.object._meta.object_name, + 'pk': obj.object.pk, + 'error_msg': force_text(e) + },) + raise + + self.loaded_object_count += loaded_objects_in_fixture + self.fixture_object_count += objects_in_fixture + label_found = True + except Exception as e: + if not isinstance(e, CommandError): + e.args = ("Problem installing fixture '%s': %s" % (full_path, e),) + raise + finally: + fixture.close() + + # If the fixture we loaded contains 0 objects, assume that an + # error was encountered during fixture loading. + if objects_in_fixture == 0: + raise CommandError( + "No fixture data found for '%s'. 
(File format may be invalid.)" % + (fixture_name)) diff --git a/django/core/management/commands/makemessages.py b/django/core/management/commands/makemessages.py index 81c4fdf8cc..606cbe0b85 100644 --- a/django/core/management/commands/makemessages.py +++ b/django/core/management/commands/makemessages.py @@ -301,7 +301,7 @@ def make_messages(locale=None, domain='django', verbosity=1, all=False, locales = [] if locale is not None: - locales.append(locale) + locales.append(str(locale)) elif all: locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir)) locales = [os.path.basename(l) for l in locale_dirs] @@ -316,8 +316,8 @@ def make_messages(locale=None, domain='django', verbosity=1, all=False, if not os.path.isdir(basedir): os.makedirs(basedir) - pofile = os.path.join(basedir, '%s.po' % domain) - potfile = os.path.join(basedir, '%s.pot' % domain) + pofile = os.path.join(basedir, '%s.po' % str(domain)) + potfile = os.path.join(basedir, '%s.pot' % str(domain)) if os.path.exists(potfile): os.unlink(potfile) diff --git a/django/core/management/sql.py b/django/core/management/sql.py index 78cd17a23a..e46f4ae4f5 100644 --- a/django/core/management/sql.py +++ b/django/core/management/sql.py @@ -8,6 +8,7 @@ from django.conf import settings from django.core.management.base import CommandError from django.db import models from django.db.models import get_models +from django.utils._os import upath def sql_create(app, style, connection): @@ -145,21 +146,21 @@ def sql_all(app, style, connection): def _split_statements(content): comment_re = re.compile(r"^((?:'[^']*'|[^'])*?)--.*$") statements = [] - statement = "" + statement = [] for line in content.split("\n"): cleaned_line = comment_re.sub(r"\1", line).strip() if not cleaned_line: continue - statement += cleaned_line - if statement.endswith(";"): - statements.append(statement) - statement = "" + statement.append(cleaned_line) + if cleaned_line.endswith(";"): + statements.append(" ".join(statement)) + statement = [] return statements def custom_sql_for_model(model, style, connection): opts = model._meta - app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql')) + app_dir = os.path.normpath(os.path.join(os.path.dirname(upath(models.get_app(model._meta.app_label).__file__)), 'sql')) output = [] # Post-creation SQL should come before any initial SQL data is loaded. 
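# A minimal standalone sketch, not part of the patch, of the statement-splitting
# behaviour refactored in django/core/management/sql.py above: "--" comments are
# stripped and cleaned lines are accumulated until one ends with ";". The
# split_statements helper below is a local re-implementation for illustration,
# and the app_author table is hypothetical.
import re

comment_re = re.compile(r"^((?:'[^']*'|[^'])*?)--.*$")

def split_statements(content):
    statements = []
    statement = []
    for line in content.split("\n"):
        cleaned_line = comment_re.sub(r"\1", line).strip()
        if not cleaned_line:
            continue
        statement.append(cleaned_line)
        if cleaned_line.endswith(";"):
            statements.append(" ".join(statement))
            statement = []
    return statements

custom_sql = """
-- initial data for the hypothetical app_author table
INSERT INTO app_author (name)
VALUES ('Anne');  -- a statement may span several lines
INSERT INTO app_author (name) VALUES ('Bob');
"""

assert split_statements(custom_sql) == [
    "INSERT INTO app_author (name) VALUES ('Anne');",
    "INSERT INTO app_author (name) VALUES ('Bob');",
]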
diff --git a/django/core/management/templates.py b/django/core/management/templates.py index d34a0deb7e..f522097b8c 100644 --- a/django/core/management/templates.py +++ b/django/core/management/templates.py @@ -8,8 +8,6 @@ import shutil import stat import sys import tempfile -import codecs - try: from urllib.request import urlretrieve except ImportError: # Python 2 @@ -156,12 +154,14 @@ class TemplateCommand(BaseCommand): # Only render the Python files, as we don't want to # accidentally render Django templates files - with codecs.open(old_path, 'r', 'utf-8') as template_file: + with open(old_path, 'rb') as template_file: content = template_file.read() if filename.endswith(extensions) or filename in extra_files: + content = content.decode('utf-8') template = Template(content) content = template.render(context) - with codecs.open(new_path, 'w', 'utf-8') as new_file: + content = content.encode('utf-8') + with open(new_path, 'wb') as new_file: new_file.write(content) if self.verbosity >= 2: diff --git a/django/core/management/validation.py b/django/core/management/validation.py index 957a712b72..c0452c5aa6 100644 --- a/django/core/management/validation.py +++ b/django/core/management/validation.py @@ -1,3 +1,4 @@ +import collections import sys from django.conf import settings @@ -34,7 +35,7 @@ def get_validation_errors(outfile, app=None): for (app_name, error) in get_app_errors().items(): e.add(app_name, error) - for cls in models.get_models(app): + for cls in models.get_models(app, include_swapped=True): opts = cls._meta # Check swappable attribute. @@ -137,16 +138,17 @@ def get_validation_errors(outfile, app=None): # fields, m2m fields, m2m related objects or related objects if f.rel: if f.rel.to not in models.get_models(): - e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to)) + # If the related model is swapped, provide a hint; + # otherwise, the model just hasn't been installed. + if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped: + e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable)) + else: + e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to)) # it is a string and we could not find the model it refers to # so skip the next section if isinstance(f.rel.to, six.string_types): continue - # Make sure the model we're related hasn't been swapped out - if f.rel.to._meta.swapped: - e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable)) - # Make sure the related field specified by a ForeignKey is unique if not f.rel.to._meta.get_field(f.rel.field_name).unique: e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.rel.field_name, f.rel.to.__name__)) @@ -183,16 +185,18 @@ def get_validation_errors(outfile, app=None): # existing fields, m2m fields, m2m related objects or related # objects if f.rel.to not in models.get_models(): - e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to)) + # If the related model is swapped, provide a hint; + # otherwise, the model just hasn't been installed. 
+ if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped: + e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable)) + else: + e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to)) + + # it is a string and we could not find the model it refers to # so skip the next section if isinstance(f.rel.to, six.string_types): continue - # Make sure the model we're related hasn't been swapped out - if f.rel.to._meta.swapped: - e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable)) - # Check that the field is not set to unique. ManyToManyFields do not support unique. if f.unique: e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name) @@ -327,15 +331,29 @@ def get_validation_errors(outfile, app=None): # Check unique_together. for ut in opts.unique_together: - for field_name in ut: - try: - f = opts.get_field(field_name, many_to_many=True) - except models.FieldDoesNotExist: - e.add(opts, '"unique_together" refers to %s, a field that doesn\'t exist. Check your syntax.' % field_name) - else: - if isinstance(f.rel, models.ManyToManyRel): - e.add(opts, '"unique_together" refers to %s. ManyToManyFields are not supported in unique_together.' % f.name) - if f not in opts.local_fields: - e.add(opts, '"unique_together" refers to %s. This is not in the same model as the unique_together statement.' % f.name) + validate_local_fields(e, opts, "unique_together", ut) + if not isinstance(opts.index_together, collections.Sequence): + e.add(opts, '"index_together" must be a sequence') + else: + for it in opts.index_together: + validate_local_fields(e, opts, "index_together", it) return len(e.errors) + + +def validate_local_fields(e, opts, field_name, fields): + from django.db import models + + if not isinstance(fields, collections.Sequence): + e.add(opts, 'all %s elements must be sequences' % field_name) + else: + for field in fields: + try: + f = opts.get_field(field, many_to_many=True) + except models.FieldDoesNotExist: + e.add(opts, '"%s" refers to %s, a field that doesn\'t exist.' % (field_name, field)) + else: + if isinstance(f.rel, models.ManyToManyRel): + e.add(opts, '"%s" refers to %s. ManyToManyFields are not supported in %s.' % (field_name, f.name, field_name)) + if f not in opts.local_fields: + e.add(opts, '"%s" refers to %s. This is not in the same model as the %s statement.'
% (field_name, f.name, field_name)) diff --git a/django/core/paginator.py b/django/core/paginator.py index 6b0b3542f8..9ccff51a34 100644 --- a/django/core/paginator.py +++ b/django/core/paginator.py @@ -1,16 +1,25 @@ +import collections from math import ceil +from django.utils import six + + class InvalidPage(Exception): pass + class PageNotAnInteger(InvalidPage): pass + class EmptyPage(InvalidPage): pass + class Paginator(object): - def __init__(self, object_list, per_page, orphans=0, allow_empty_first_page=True): + + def __init__(self, object_list, per_page, orphans=0, + allow_empty_first_page=True): self.object_list = object_list self.per_page = int(per_page) self.orphans = int(orphans) @@ -18,7 +27,9 @@ class Paginator(object): self._num_pages = self._count = None def validate_number(self, number): - "Validates the given 1-based page number." + """ + Validates the given 1-based page number. + """ try: number = int(number) except (TypeError, ValueError): @@ -33,16 +44,29 @@ class Paginator(object): return number def page(self, number): - "Returns a Page object for the given 1-based page number." + """ + Returns a Page object for the given 1-based page number. + """ number = self.validate_number(number) bottom = (number - 1) * self.per_page top = bottom + self.per_page if top + self.orphans >= self.count: top = self.count - return Page(self.object_list[bottom:top], number, self) + return self._get_page(self.object_list[bottom:top], number, self) + + def _get_page(self, *args, **kwargs): + """ + Returns an instance of a single page. + + This hook can be used by subclasses to use an alternative to the + standard :cls:`Page` object. + """ + return Page(*args, **kwargs) def _get_count(self): - "Returns the total number of objects, across all pages." + """ + Returns the total number of objects, across all pages. + """ if self._count is None: try: self._count = self.object_list.count() @@ -55,7 +79,9 @@ class Paginator(object): count = property(_get_count) def _get_num_pages(self): - "Returns the total number of pages." + """ + Returns the total number of pages. + """ if self._num_pages is None: if self.count == 0 and not self.allow_empty_first_page: self._num_pages = 0 @@ -73,9 +99,12 @@ class Paginator(object): return range(1, self.num_pages + 1) page_range = property(_get_page_range) -QuerySetPaginator = Paginator # For backwards-compatibility. -class Page(object): +QuerySetPaginator = Paginator # For backwards-compatibility. + + +class Page(collections.Sequence): + def __init__(self, object_list, number, paginator): self.object_list = object_list self.number = number @@ -88,40 +117,12 @@ class Page(object): return len(self.object_list) def __getitem__(self, index): + if not isinstance(index, (slice,) + six.integer_types): + raise TypeError # The object_list is converted to a list so that if it was a QuerySet # it won't be a database hit per __getitem__. return list(self.object_list)[index] - # The following four methods are only necessary for Python <2.6 - # compatibility (this class could just extend 2.6's collections.Sequence). - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def index(self, value): - for i, v in enumerate(self): - if v == value: - return i - raise ValueError - - def count(self, value): - return sum([1 for v in self if v == value]) - - # End of compatibility methods. 
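# A minimal sketch, not part of the patch, of what subclassing Sequence buys the
# Page class above: __iter__, __contains__, index() and count() are derived
# automatically once __len__ and __getitem__ exist, which is why the hand-written
# compatibility methods removed here are no longer needed. TinyPage is a
# hypothetical stand-in, not Django's Page.
try:
    from collections.abc import Sequence   # Python 3.3+
except ImportError:
    from collections import Sequence       # Python 2, as used in the patch

class TinyPage(Sequence):
    def __init__(self, object_list):
        self.object_list = list(object_list)

    def __len__(self):
        return len(self.object_list)

    def __getitem__(self, index):
        return self.object_list[index]

page = TinyPage(['a', 'b', 'c'])
assert 'b' in page                      # __contains__ from Sequence
assert list(page) == ['a', 'b', 'c']    # __iter__ from Sequence
assert page.index('c') == 2             # index() from Sequence
assert page.count('a') == 1             # count() from Sequence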
- def has_next(self): return self.number < self.paginator.num_pages diff --git a/django/core/serializers/base.py b/django/core/serializers/base.py index 276f9a4738..294934a04a 100644 --- a/django/core/serializers/base.py +++ b/django/core/serializers/base.py @@ -112,7 +112,7 @@ class Serializer(object): if callable(getattr(self.stream, 'getvalue', None)): return self.stream.getvalue() -class Deserializer(object): +class Deserializer(six.Iterator): """ Abstract base deserializer class. """ @@ -138,8 +138,6 @@ class Deserializer(object): """Iteration iterface -- return the next item in the stream""" raise NotImplementedError - next = __next__ # Python 2 compatibility - class DeserializedObject(object): """ A deserialized model. diff --git a/django/core/serializers/xml_serializer.py b/django/core/serializers/xml_serializer.py index 666587dc77..ea333a22bd 100644 --- a/django/core/serializers/xml_serializer.py +++ b/django/core/serializers/xml_serializer.py @@ -161,8 +161,6 @@ class Deserializer(base.Deserializer): return self._handle_object(node) raise StopIteration - next = __next__ # Python 2 compatibility - def _handle_object(self, node): """ Convert an node to a DeserializedObject. diff --git a/django/core/servers/basehttp.py b/django/core/servers/basehttp.py index a7004f2c2f..7387d13199 100644 --- a/django/core/servers/basehttp.py +++ b/django/core/servers/basehttp.py @@ -138,6 +138,10 @@ class WSGIRequestHandler(simple_server.WSGIRequestHandler, object): self.style = color_style() super(WSGIRequestHandler, self).__init__(*args, **kwargs) + def address_string(self): + # Short-circuit parent method to not call socket.getfqdn + return self.client_address[0] + def log_message(self, format, *args): # Don't bother logging requests for admin images or the favicon. if (self.path.startswith(self.admin_static_prefix) diff --git a/django/core/signing.py b/django/core/signing.py index 147e54780c..92ab968123 100644 --- a/django/core/signing.py +++ b/django/core/signing.py @@ -97,10 +97,10 @@ class JSONSerializer(object): signing.loads. """ def dumps(self, obj): - return json.dumps(obj, separators=(',', ':')) + return json.dumps(obj, separators=(',', ':')).encode('latin-1') def loads(self, data): - return json.loads(data) + return json.loads(data.decode('latin-1')) def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, compress=False): @@ -116,8 +116,10 @@ def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, only valid for a given namespace. Leaving this at the default value or re-using a salt value across different parts of your application without good cause is a security risk. + + The serializer is expected to return a bytestring. """ - data = force_bytes(serializer().dumps(obj)) + data = serializer().dumps(obj) # Flag for if it's been compressed or not is_compressed = False @@ -136,20 +138,22 @@ def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, max_age=None): """ - Reverse of dumps(), raises BadSignature if signature fails + Reverse of dumps(), raises BadSignature if signature fails. + + The serializer is expected to accept a bytestring. """ # TimestampSigner.unsign always returns unicode but base64 and zlib # compression operate on bytes. 
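# A quick illustration, not part of the patch, of the bytes-indexing pitfall the
# slice-based check just below fixes: on Python 3, indexing a bytes object
# yields an int, so comparing payload[0] with b'.' is always False there, while
# a one-byte slice behaves the same on Python 2 and 3.
payload = b'.compressed-data'            # hypothetical signed payload

assert payload[:1] == b'.'               # True on both Python 2 and Python 3
# On Python 3: payload[0] == 46 (an int), so payload[0] == b'.' is False.
# On Python 2: payload[0] == '.', so the old comparison happened to work.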
base64d = force_bytes(TimestampSigner(key, salt=salt).unsign(s, max_age=max_age)) decompress = False - if base64d[0] == b'.': + if base64d[:1] == b'.': # It's compressed; uncompress it first base64d = base64d[1:] decompress = True data = b64_decode(base64d) if decompress: data = zlib.decompress(data) - return serializer().loads(force_str(data)) + return serializer().loads(data) class Signer(object): diff --git a/django/core/urlresolvers.py b/django/core/urlresolvers.py index af3df83d0a..c657fd9a54 100644 --- a/django/core/urlresolvers.py +++ b/django/core/urlresolvers.py @@ -16,6 +16,7 @@ from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist from django.utils.datastructures import MultiValueDict from django.utils.encoding import force_str, force_text, iri_to_uri from django.utils.functional import memoize, lazy +from django.utils.http import urlquote from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule from django.utils.regex_helper import normalize @@ -250,9 +251,9 @@ class RegexURLResolver(LocaleRegexProvider): urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__ else: urlconf_repr = repr(self.urlconf_name) - return force_str('<%s %s (%s:%s) %s>' % ( + return str('<%s %s (%s:%s) %s>') % ( self.__class__.__name__, urlconf_repr, self.app_name, - self.namespace, self.regex.pattern)) + self.namespace, self.regex.pattern) def _populate(self): lookups = MultiValueDict() @@ -379,14 +380,15 @@ class RegexURLResolver(LocaleRegexProvider): except (ImportError, AttributeError) as e: raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e)) possibilities = self.reverse_dict.getlist(lookup_view) - prefix_norm, prefix_args = normalize(_prefix)[0] + + prefix_norm, prefix_args = normalize(urlquote(_prefix))[0] for possibility, pattern, defaults in possibilities: for result, params in possibility: if args: if len(args) != len(params) + len(prefix_args): continue unicode_args = [force_text(val) for val in args] - candidate = (prefix_norm + result) % dict(zip(prefix_args + params, unicode_args)) + candidate = (prefix_norm + result) % dict(zip(prefix_args + params, unicode_args)) else: if set(kwargs.keys()) | set(defaults.keys()) != set(params) | set(defaults.keys()) | set(prefix_args): continue @@ -398,8 +400,8 @@ class RegexURLResolver(LocaleRegexProvider): if not matches: continue unicode_kwargs = dict([(k, force_text(v)) for (k, v) in kwargs.items()]) - candidate = (prefix_norm + result) % unicode_kwargs - if re.search('^%s%s' % (_prefix, pattern), candidate, re.UNICODE): + candidate = (prefix_norm.replace('%', '%%') + result) % unicode_kwargs + if re.search('^%s%s' % (prefix_norm, pattern), candidate, re.UNICODE): return candidate # lookup_view can be URL label, or dotted path, or callable, Any of # these can be passed in at the top, but callables are not friendly in diff --git a/django/db/__init__.py b/django/db/__init__.py index 26c7add0af..b1980488df 100644 --- a/django/db/__init__.py +++ b/django/db/__init__.py @@ -8,7 +8,7 @@ __all__ = ('backend', 'connection', 'connections', 'router', 'DatabaseError', 'IntegrityError', 'DEFAULT_DB_ALIAS') -if DEFAULT_DB_ALIAS not in settings.DATABASES: +if settings.DATABASES and DEFAULT_DB_ALIAS not in settings.DATABASES: raise ImproperlyConfigured("You must define a '%s' database" % DEFAULT_DB_ALIAS) connections = ConnectionHandler(settings.DATABASES) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 28024c6428..0b5ba55cd0 
100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -181,8 +181,6 @@ class BaseDatabaseWrapper(object): """ if self.transaction_state: return self.transaction_state[-1] - # Note that this setting isn't documented, and is only used here, and - # in enter_transaction_management() return settings.TRANSACTIONS_MANAGED def managed(self, flag=True): @@ -904,16 +902,14 @@ class BaseDatabaseOperations(object): Coerce the value returned by the database backend into a consistent type that is compatible with the field type. """ - internal_type = field.get_internal_type() - if internal_type == 'DecimalField': + if value is None: return value - elif internal_type == 'FloatField': + internal_type = field.get_internal_type() + if internal_type == 'FloatField': return float(value) elif (internal_type and (internal_type.endswith('IntegerField') or internal_type == 'AutoField')): return int(value) - elif internal_type in ('DateField', 'DateTimeField', 'TimeField'): - return value return value def check_aggregate_support(self, aggregate_func): @@ -935,6 +931,11 @@ class BaseDatabaseOperations(object): conn = ' %s ' % connector return conn.join(sub_expressions) + def modify_insert_params(self, placeholders, params): + """Allow modification of insert parameters. Needed for Oracle Spatial + backend due to #10888. + """ + return params class BaseDatabaseIntrospection(object): """ diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py index 69eee35352..9c3095d0aa 100644 --- a/django/db/backends/creation.py +++ b/django/db/backends/creation.py @@ -179,34 +179,47 @@ class BaseDatabaseCreation(object): output = [] for f in model._meta.local_fields: output.extend(self.sql_indexes_for_field(model, f, style)) + for fs in model._meta.index_together: + fields = [model._meta.get_field_by_name(f)[0] for f in fs] + output.extend(self.sql_indexes_for_fields(model, fields, style)) return output def sql_indexes_for_field(self, model, f, style): """ Return the CREATE INDEX SQL statements for a single model field. 
""" + if f.db_index and not f.unique: + return self.sql_indexes_for_fields(model, [f], style) + else: + return [] + + def sql_indexes_for_fields(self, model, fields, style): from django.db.backends.util import truncate_name - if f.db_index and not f.unique: - qn = self.connection.ops.quote_name - tablespace = f.db_tablespace or model._meta.db_tablespace - if tablespace: - tablespace_sql = self.connection.ops.tablespace_sql(tablespace) - if tablespace_sql: - tablespace_sql = ' ' + tablespace_sql - else: - tablespace_sql = '' - i_name = '%s_%s' % (model._meta.db_table, self._digest(f.column)) - output = [style.SQL_KEYWORD('CREATE INDEX') + ' ' + - style.SQL_TABLE(qn(truncate_name( - i_name, self.connection.ops.max_name_length()))) + ' ' + - style.SQL_KEYWORD('ON') + ' ' + - style.SQL_TABLE(qn(model._meta.db_table)) + ' ' + - "(%s)" % style.SQL_FIELD(qn(f.column)) + - "%s;" % tablespace_sql] + if len(fields) == 1 and fields[0].db_tablespace: + tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace) + elif model._meta.db_tablespace: + tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace) else: - output = [] - return output + tablespace_sql = "" + if tablespace_sql: + tablespace_sql = " " + tablespace_sql + + field_names = [] + qn = self.connection.ops.quote_name + for f in fields: + field_names.append(style.SQL_FIELD(qn(f.column))) + + index_name = "%s_%s" % (model._meta.db_table, self._digest([f.name for f in fields])) + + return [ + style.SQL_KEYWORD("CREATE INDEX") + " " + + style.SQL_TABLE(qn(truncate_name(index_name, self.connection.ops.max_name_length()))) + " " + + style.SQL_KEYWORD("ON") + " " + + style.SQL_TABLE(qn(model._meta.db_table)) + " " + + "(%s)" % style.SQL_FIELD(", ".join(field_names)) + + "%s;" % tablespace_sql, + ] def sql_destroy_model(self, model, references_to_delete, style): """ @@ -270,6 +283,7 @@ class BaseDatabaseCreation(object): self._create_test_db(verbosity, autoclobber) self.connection.close() + settings.DATABASES[self.connection.alias]["NAME"] = test_database_name self.connection.settings_dict["NAME"] = test_database_name # Report syncdb messages at one level lower than that requested. diff --git a/django/db/backends/mysql/base.py b/django/db/backends/mysql/base.py index 797406859c..f782ca80ae 100644 --- a/django/db/backends/mysql/base.py +++ b/django/db/backends/mysql/base.py @@ -374,47 +374,56 @@ class DatabaseWrapper(BaseDatabaseWrapper): self.connection.ping() return True except DatabaseError: - self.connection.close() - self.connection = None + self.close() return False + def get_connection_params(self): + kwargs = { + 'conv': django_conversions, + 'charset': 'utf8', + 'use_unicode': True, + } + settings_dict = self.settings_dict + if settings_dict['USER']: + kwargs['user'] = settings_dict['USER'] + if settings_dict['NAME']: + kwargs['db'] = settings_dict['NAME'] + if settings_dict['PASSWORD']: + kwargs['passwd'] = force_str(settings_dict['PASSWORD']) + if settings_dict['HOST'].startswith('/'): + kwargs['unix_socket'] = settings_dict['HOST'] + elif settings_dict['HOST']: + kwargs['host'] = settings_dict['HOST'] + if settings_dict['PORT']: + kwargs['port'] = int(settings_dict['PORT']) + # We need the number of potentially affected rows after an + # "UPDATE", not the number of changed rows. 
+ kwargs['client_flag'] = CLIENT.FOUND_ROWS + kwargs.update(settings_dict['OPTIONS']) + return kwargs + + def get_new_connection(self, conn_params): + conn = Database.connect(**conn_params) + conn.encoders[SafeText] = conn.encoders[six.text_type] + conn.encoders[SafeBytes] = conn.encoders[bytes] + return conn + + def init_connection_state(self): + cursor = self.connection.cursor() + # SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column + # on a recently-inserted row will return when the field is tested for + # NULL. Disabling this value brings this aspect of MySQL in line with + # SQL standards. + cursor.execute('SET SQL_AUTO_IS_NULL = 0') + cursor.close() + def _cursor(self): - new_connection = False if not self._valid_connection(): - new_connection = True - kwargs = { - 'conv': django_conversions, - 'charset': 'utf8', - 'use_unicode': True, - } - settings_dict = self.settings_dict - if settings_dict['USER']: - kwargs['user'] = settings_dict['USER'] - if settings_dict['NAME']: - kwargs['db'] = settings_dict['NAME'] - if settings_dict['PASSWORD']: - kwargs['passwd'] = force_str(settings_dict['PASSWORD']) - if settings_dict['HOST'].startswith('/'): - kwargs['unix_socket'] = settings_dict['HOST'] - elif settings_dict['HOST']: - kwargs['host'] = settings_dict['HOST'] - if settings_dict['PORT']: - kwargs['port'] = int(settings_dict['PORT']) - # We need the number of potentially affected rows after an - # "UPDATE", not the number of changed rows. - kwargs['client_flag'] = CLIENT.FOUND_ROWS - kwargs.update(settings_dict['OPTIONS']) - self.connection = Database.connect(**kwargs) - self.connection.encoders[SafeText] = self.connection.encoders[six.text_type] - self.connection.encoders[SafeBytes] = self.connection.encoders[bytes] + conn_params = self.get_connection_params() + self.connection = self.get_new_connection(conn_params) + self.init_connection_state() connection_created.send(sender=self.__class__, connection=self) cursor = self.connection.cursor() - if new_connection: - # SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column - # on a recently-inserted row will return when the field is tested for - # NULL. Disabling this value brings this aspect of MySQL in line with - # SQL standards. - cursor.execute('SET SQL_AUTO_IS_NULL = 0') return CursorWrapper(cursor) def _rollback(self): @@ -435,8 +444,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): server_info = self.connection.get_server_info() if new_connection: # Make sure we close the connection - self.connection.close() - self.connection = None + self.close() m = server_version_re.match(server_info) if not m: raise Exception('Unable to determine MySQL version from version string %r' % server_info) diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index f4b11ec327..129b5d827b 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -257,6 +257,10 @@ WHEN (new.%(col_name)s IS NULL) if not name.startswith('"') and not name.endswith('"'): name = '"%s"' % util.truncate_name(name.upper(), self.max_name_length()) + # Oracle puts the query text into a (query % args) construct, so % signs + # in names need to be escaped. The '%%' will be collapsed back to '%' at + # that stage so we aren't really making the name longer here. 
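# A tiny illustration, not part of the patch, of the escaping described in the
# comment above: because the final query text later goes through a
# (query % args) substitution, a '%' inside a quoted name must be doubled so it
# collapses back to a single '%' at that stage. The identifier below is
# hypothetical.
name = '"SOME%NAME"'.replace('%', '%%')
query = ('SELECT * FROM ' + name + ' WHERE col = %s') % (':arg0',)
assert query == 'SELECT * FROM "SOME%NAME" WHERE col = :arg0'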
+ name = name.replace('%','%%') return name.upper() def random_function_sql(self): @@ -486,66 +490,78 @@ class DatabaseWrapper(BaseDatabaseWrapper): return "%s/%s@%s" % (settings_dict['USER'], settings_dict['PASSWORD'], dsn) + def create_cursor(self, conn): + return FormatStylePlaceholderCursor(conn) + + def get_connection_params(self): + conn_params = self.settings_dict['OPTIONS'].copy() + if 'use_returning_into' in conn_params: + del conn_params['use_returning_into'] + return conn_params + + def get_new_connection(self, conn_params): + conn_string = convert_unicode(self._connect_string()) + return Database.connect(conn_string, **conn_params) + + def init_connection_state(self): + cursor = self.create_cursor(self.connection) + # Set the territory first. The territory overrides NLS_DATE_FORMAT + # and NLS_TIMESTAMP_FORMAT to the territory default. When all of + # these are set in single statement it isn't clear what is supposed + # to happen. + cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'") + # Set oracle date to ansi date format. This only needs to execute + # once when we create a new connection. We also set the Territory + # to 'AMERICA' which forces Sunday to evaluate to a '1' in + # TO_CHAR(). + cursor.execute( + "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'" + " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" + + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '')) + cursor.close() + if 'operators' not in self.__dict__: + # Ticket #14149: Check whether our LIKE implementation will + # work for this connection or we need to fall back on LIKEC. + # This check is performed only once per DatabaseWrapper + # instance per thread, since subsequent connections will use + # the same settings. + cursor = self.create_cursor(self.connection) + try: + cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s" + % self._standard_operators['contains'], + ['X']) + except utils.DatabaseError: + self.operators = self._likec_operators + else: + self.operators = self._standard_operators + cursor.close() + + try: + self.oracle_version = int(self.connection.version.split('.')[0]) + # There's no way for the DatabaseOperations class to know the + # currently active Oracle version, so we do some setups here. + # TODO: Multi-db support will need a better solution (a way to + # communicate the current version). + if self.oracle_version <= 9: + self.ops.regex_lookup = self.ops.regex_lookup_9 + else: + self.ops.regex_lookup = self.ops.regex_lookup_10 + except ValueError: + pass + try: + self.connection.stmtcachesize = 20 + except: + # Django docs specify cx_Oracle version 4.3.1 or higher, but + # stmtcachesize is available only in 4.3.2 and up. + pass + def _cursor(self): - cursor = None if not self._valid_connection(): - conn_string = convert_unicode(self._connect_string()) - conn_params = self.settings_dict['OPTIONS'].copy() - if 'use_returning_into' in conn_params: - del conn_params['use_returning_into'] - self.connection = Database.connect(conn_string, **conn_params) - cursor = FormatStylePlaceholderCursor(self.connection) - # Set the territory first. The territory overrides NLS_DATE_FORMAT - # and NLS_TIMESTAMP_FORMAT to the territory default. When all of - # these are set in single statement it isn't clear what is supposed - # to happen. - cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'") - # Set oracle date to ansi date format. This only needs to execute - # once when we create a new connection. 
We also set the Territory - # to 'AMERICA' which forces Sunday to evaluate to a '1' in - # TO_CHAR(). - cursor.execute( - "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'" - " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" - + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '')) - - if 'operators' not in self.__dict__: - # Ticket #14149: Check whether our LIKE implementation will - # work for this connection or we need to fall back on LIKEC. - # This check is performed only once per DatabaseWrapper - # instance per thread, since subsequent connections will use - # the same settings. - try: - cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s" - % self._standard_operators['contains'], - ['X']) - except utils.DatabaseError: - self.operators = self._likec_operators - else: - self.operators = self._standard_operators - - try: - self.oracle_version = int(self.connection.version.split('.')[0]) - # There's no way for the DatabaseOperations class to know the - # currently active Oracle version, so we do some setups here. - # TODO: Multi-db support will need a better solution (a way to - # communicate the current version). - if self.oracle_version <= 9: - self.ops.regex_lookup = self.ops.regex_lookup_9 - else: - self.ops.regex_lookup = self.ops.regex_lookup_10 - except ValueError: - pass - try: - self.connection.stmtcachesize = 20 - except: - # Django docs specify cx_Oracle version 4.3.1 or higher, but - # stmtcachesize is available only in 4.3.2 and up. - pass + conn_params = self.get_connection_params() + self.connection = self.get_new_connection(conn_params) + self.init_connection_state() connection_created.send(sender=self.__class__, connection=self) - if not cursor: - cursor = FormatStylePlaceholderCursor(self.connection) - return cursor + return self.create_cursor(self.connection) # Oracle doesn't support savepoint commits. Ignore them. 
def _savepoint_commit(self, sid): @@ -775,7 +791,7 @@ class FormatStylePlaceholderCursor(object): return CursorIterator(self.cursor) -class CursorIterator(object): +class CursorIterator(six.Iterator): """Cursor iterator wrapper that invokes our custom row factory.""" @@ -789,8 +805,6 @@ class CursorIterator(object): def __next__(self): return _rowfactory(next(self.iter), self.cursor) - next = __next__ # Python 2 compatibility - def _rowfactory(row, cursor): # Cast numeric values as the appropriate Python type based upon the diff --git a/django/db/backends/oracle/creation.py b/django/db/backends/oracle/creation.py index c4dc80503c..50e8d655a0 100644 --- a/django/db/backends/oracle/creation.py +++ b/django/db/backends/oracle/creation.py @@ -111,10 +111,12 @@ class DatabaseCreation(BaseDatabaseCreation): print("Tests cancelled.") sys.exit(1) - self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER'] - self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD'] - self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict['USER'] = TEST_USER - self.connection.settings_dict['PASSWORD'] = TEST_PASSWD + from django.db import settings + real_settings = settings.DATABASES[self.connection.alias] + real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER'] + real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD'] + real_settings['TEST_USER'] = real_settings['USER'] = self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict['USER'] = TEST_USER + real_settings['PASSWORD'] = self.connection.settings_dict['PASSWORD'] = TEST_PASSWD return self.connection.settings_dict['NAME'] diff --git a/django/db/backends/postgresql_psycopg2/base.py b/django/db/backends/postgresql_psycopg2/base.py index cd26098caa..b9addfc926 100644 --- a/django/db/backends/postgresql_psycopg2/base.py +++ b/django/db/backends/postgresql_psycopg2/base.py @@ -159,48 +159,59 @@ class DatabaseWrapper(BaseDatabaseWrapper): return self._pg_version pg_version = property(_get_pg_version) - def _cursor(self): + def get_connection_params(self): settings_dict = self.settings_dict - if self.connection is None: - if not settings_dict['NAME']: - from django.core.exceptions import ImproperlyConfigured - raise ImproperlyConfigured( - "settings.DATABASES is improperly configured. 
" - "Please supply the NAME value.") - conn_params = { - 'database': settings_dict['NAME'], - } - conn_params.update(settings_dict['OPTIONS']) - if 'autocommit' in conn_params: - del conn_params['autocommit'] - if settings_dict['USER']: - conn_params['user'] = settings_dict['USER'] - if settings_dict['PASSWORD']: - conn_params['password'] = force_str(settings_dict['PASSWORD']) - if settings_dict['HOST']: - conn_params['host'] = settings_dict['HOST'] - if settings_dict['PORT']: - conn_params['port'] = settings_dict['PORT'] - self.connection = Database.connect(**conn_params) - self.connection.set_client_encoding('UTF8') - tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE') - if tz: - try: - get_parameter_status = self.connection.get_parameter_status - except AttributeError: - # psycopg2 < 2.0.12 doesn't have get_parameter_status - conn_tz = None - else: - conn_tz = get_parameter_status('TimeZone') + if not settings_dict['NAME']: + from django.core.exceptions import ImproperlyConfigured + raise ImproperlyConfigured( + "settings.DATABASES is improperly configured. " + "Please supply the NAME value.") + conn_params = { + 'database': settings_dict['NAME'], + } + conn_params.update(settings_dict['OPTIONS']) + if 'autocommit' in conn_params: + del conn_params['autocommit'] + if settings_dict['USER']: + conn_params['user'] = settings_dict['USER'] + if settings_dict['PASSWORD']: + conn_params['password'] = force_str(settings_dict['PASSWORD']) + if settings_dict['HOST']: + conn_params['host'] = settings_dict['HOST'] + if settings_dict['PORT']: + conn_params['port'] = settings_dict['PORT'] + return conn_params - if conn_tz != tz: - # Set the time zone in autocommit mode (see #17062) - self.connection.set_isolation_level( - psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - self.connection.cursor().execute( - self.ops.set_time_zone_sql(), [tz]) - self.connection.set_isolation_level(self.isolation_level) - self._get_pg_version() + def get_new_connection(self, conn_params): + return Database.connect(**conn_params) + + def init_connection_state(self): + settings_dict = self.settings_dict + self.connection.set_client_encoding('UTF8') + tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE') + if tz: + try: + get_parameter_status = self.connection.get_parameter_status + except AttributeError: + # psycopg2 < 2.0.12 doesn't have get_parameter_status + conn_tz = None + else: + conn_tz = get_parameter_status('TimeZone') + + if conn_tz != tz: + # Set the time zone in autocommit mode (see #17062) + self.connection.set_isolation_level( + psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + self.connection.cursor().execute( + self.ops.set_time_zone_sql(), [tz]) + self.connection.set_isolation_level(self.isolation_level) + self._get_pg_version() + + def _cursor(self): + if self.connection is None: + conn_params = self.get_connection_params() + self.connection = self.get_new_connection(conn_params) + self.init_connection_state() connection_created.send(sender=self.__class__, connection=self) cursor = self.connection.cursor() cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index 341fc13e81..aedf953cfd 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -123,8 +123,12 @@ class DatabaseOperations(BaseDatabaseOperations): """ SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of 999 variables per query. 
+ + If there is just single field to insert, then we can hit another + limit, SQLITE_MAX_COMPOUND_SELECT which defaults to 500. """ - return (999 // len(fields)) if len(fields) > 0 else len(objs) + limit = 999 if len(fields) > 1 else 500 + return (limit // len(fields)) if len(fields) > 0 else len(objs) def date_extract_sql(self, lookup_type, field_name): # sqlite doesn't support extract, so we fake it with the user-defined @@ -230,7 +234,7 @@ class DatabaseOperations(BaseDatabaseOperations): res.append("SELECT %s" % ", ".join( "%%s AS %s" % self.quote_name(f.column) for f in fields )) - res.extend(["UNION SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1)) + res.extend(["UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1)) return " ".join(res) class DatabaseWrapper(BaseDatabaseWrapper): @@ -265,7 +269,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): self.introspection = DatabaseIntrospection(self) self.validation = BaseDatabaseValidation(self) - def _sqlite_create_connection(self): + def get_connection_params(self): settings_dict = self.settings_dict if not settings_dict['NAME']: from django.core.exceptions import ImproperlyConfigured @@ -292,12 +296,24 @@ class DatabaseWrapper(BaseDatabaseWrapper): RuntimeWarning ) kwargs.update({'check_same_thread': False}) - self.connection = Database.connect(**kwargs) + return kwargs + + def get_new_connection(self, conn_params): + conn = Database.connect(**conn_params) # Register extract, date_trunc, and regexp functions. - self.connection.create_function("django_extract", 2, _sqlite_extract) - self.connection.create_function("django_date_trunc", 2, _sqlite_date_trunc) - self.connection.create_function("regexp", 2, _sqlite_regexp) - self.connection.create_function("django_format_dtdelta", 5, _sqlite_format_dtdelta) + conn.create_function("django_extract", 2, _sqlite_extract) + conn.create_function("django_date_trunc", 2, _sqlite_date_trunc) + conn.create_function("regexp", 2, _sqlite_regexp) + conn.create_function("django_format_dtdelta", 5, _sqlite_format_dtdelta) + return conn + + def init_connection_state(self): + pass + + def _sqlite_create_connection(self): + conn_params = self.get_connection_params() + self.connection = self.get_new_connection(conn_params) + self.init_connection_state() connection_created.send(sender=self.__class__, connection=self) def _cursor(self): diff --git a/django/db/backends/util.py b/django/db/backends/util.py index e029c42899..1ba23060e0 100644 --- a/django/db/backends/util.py +++ b/django/db/backends/util.py @@ -24,11 +24,9 @@ class CursorWrapper(object): self.db.set_dirty() def __getattr__(self, attr): - self.set_dirty() - if attr in self.__dict__: - return self.__dict__[attr] - else: - return getattr(self.cursor, attr) + if attr in ('execute', 'executemany', 'callproc'): + self.set_dirty() + return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) diff --git a/django/db/models/base.py b/django/db/models/base.py index 35c607ac2d..db228b3b58 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -419,6 +419,11 @@ class Model(six.with_metaclass(ModelBase, object)): def __str__(self): if not six.PY3 and hasattr(self, '__unicode__'): + if type(self).__unicode__ == Model.__str__: + klass_name = type(self).__name__ + raise RuntimeError("%s.__unicode__ is aliased to __str__. Did" + " you apply @python_2_unicode_compatible" + " without defining __str__?" 
% klass_name) return force_text(self).encode('utf-8') return '%s object' % self.__class__.__name__ diff --git a/django/db/models/expressions.py b/django/db/models/expressions.py index 30c44bacde..3566d777c6 100644 --- a/django/db/models/expressions.py +++ b/django/db/models/expressions.py @@ -62,7 +62,9 @@ class ExpressionNode(tree.Node): def __truediv__(self, other): return self._combine(other, self.DIV, False) - __div__ = __truediv__ # Python 2 compatibility + + def __div__(self, other): # Python 2 compatibility + return type(self).__truediv__(self, other) def __mod__(self, other): return self._combine(other, self.MOD, False) @@ -94,7 +96,9 @@ class ExpressionNode(tree.Node): def __rtruediv__(self, other): return self._combine(other, self.DIV, True) - __rdiv__ = __rtruediv__ # Python 2 compatibility + + def __rdiv__(self, other): # Python 2 compatibility + return type(self).__rtruediv__(self, other) def __rmod__(self, other): return self._combine(other, self.MOD, True) @@ -151,10 +155,10 @@ class DateModifierNode(ExpressionNode): (A custom function is used in order to preserve six digits of fractional second information on sqlite, and to format both date and datetime values.) - Note that microsecond comparisons are not well supported with MySQL, since + Note that microsecond comparisons are not well supported with MySQL, since MySQL does not store microsecond information. - Only adding and subtracting timedeltas is supported, attempts to use other + Only adding and subtracting timedeltas is supported, attempts to use other operations raise a TypeError. """ def __init__(self, children, connector, negated=False): diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 92d35dc720..3bd426d2b3 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -54,14 +54,16 @@ def add_lazy_relation(cls, field, relation, operation): else: # Look for an "app.Model" relation - try: - app_label, model_name = relation.split(".") - except ValueError: - # If we can't split, assume a model in current app - app_label = cls._meta.app_label - model_name = relation - except AttributeError: - # If it doesn't have a split it's actually a model class + + if isinstance(relation, six.string_types): + try: + app_label, model_name = relation.split(".") + except ValueError: + # If we can't split, assume a model in current app + app_label = cls._meta.app_label + model_name = relation + else: + # it's actually a model class app_label = relation._meta.app_label model_name = relation._meta.object_name @@ -573,9 +575,31 @@ def create_many_related_manager(superclass, rel): self.reverse = reverse self.through = through self.prefetch_cache_name = prefetch_cache_name - self._pk_val = self.instance.pk - if self._pk_val is None: - raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__) + self._fk_val = self._get_fk_val(instance, source_field_name) + if self._fk_val is None: + raise ValueError('"%r" needs to have a value for field "%s" before ' + 'this many-to-many relationship can be used.' % + (instance, source_field_name)) + # Even if this relation is not to pk, we require still pk value. + # The wish is that the instance has been already saved to DB, + # although having a pk value isn't a guarantee of that. + if instance.pk is None: + raise ValueError("%r instance needs to have a primary key value before " + "a many-to-many relationship can be used." 
% + instance.__class__.__name__) + + + def _get_fk_val(self, obj, field_name): + """ + Returns the correct value for this relationship's foreign key. This + might be something else than pk value when to_field is used. + """ + fk = self.through._meta.get_field(field_name) + if fk.rel.field_name and fk.rel.field_name != fk.rel.to._meta.pk.attname: + attname = fk.rel.get_related_field().get_attname() + return fk.get_prep_lookup('exact', getattr(obj, attname)) + else: + return obj.pk def get_query_set(self): try: @@ -677,7 +701,11 @@ def create_many_related_manager(superclass, rel): if not router.allow_relation(obj, self.instance): raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' % (obj, self.instance._state.db, obj._state.db)) - new_ids.add(obj.pk) + fk_val = self._get_fk_val(obj, target_field_name) + if fk_val is None: + raise ValueError('Cannot add "%r": the value for field "%s" is None' % + (obj, target_field_name)) + new_ids.add(self._get_fk_val(obj, target_field_name)) elif isinstance(obj, Model): raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj)) else: @@ -685,7 +713,7 @@ def create_many_related_manager(superclass, rel): db = router.db_for_write(self.through, instance=self.instance) vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True) vals = vals.filter(**{ - source_field_name: self._pk_val, + source_field_name: self._fk_val, '%s__in' % target_field_name: new_ids, }) new_ids = new_ids - set(vals) @@ -699,11 +727,12 @@ def create_many_related_manager(superclass, rel): # Add the ones that aren't there already self.through._default_manager.using(db).bulk_create([ self.through(**{ - '%s_id' % source_field_name: self._pk_val, + '%s_id' % source_field_name: self._fk_val, '%s_id' % target_field_name: obj_id, }) for obj_id in new_ids ]) + if self.reverse or source_field_name == self.source_field_name: # Don't send the signal when we are inserting the # duplicate data row for symmetrical reverse entries. 
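For illustration, a minimal sketch of the to_field case that the new _get_fk_val() above is meant to handle. The models are hypothetical and not part of this patch; the point is that the intermediary table stores Group.code rather than Group.pk, so the _add_items()/_remove_items()/_clear_items() paths in this hunk have to read and filter on that value instead of the primary key.

    from django.db import models

    class Group(models.Model):
        # Unique non-pk column referenced by the join table below.
        code = models.CharField(max_length=10, unique=True)

    class Person(models.Model):
        name = models.CharField(max_length=50)
        groups = models.ManyToManyField(Group, through='Membership')

    class Membership(models.Model):
        person = models.ForeignKey(Person)
        # to_field makes Membership.group_id store Group.code, not Group.pk.
        group = models.ForeignKey(Group, to_field='code')

With such a through model, the manager's _get_fk_val(group, 'group') returns group.code (via fk.rel.get_related_field()), while _get_fk_val(person, 'person') still falls back to person.pk.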
@@ -722,7 +751,7 @@ def create_many_related_manager(superclass, rel): old_ids = set() for obj in objs: if isinstance(obj, self.model): - old_ids.add(obj.pk) + old_ids.add(self._get_fk_val(obj, target_field_name)) else: old_ids.add(obj) # Work out what DB we're operating on @@ -736,7 +765,7 @@ def create_many_related_manager(superclass, rel): model=self.model, pk_set=old_ids, using=db) # Remove the specified objects from the join table self.through._default_manager.using(db).filter(**{ - source_field_name: self._pk_val, + source_field_name: self._fk_val, '%s__in' % target_field_name: old_ids }).delete() if self.reverse or source_field_name == self.source_field_name: @@ -756,7 +785,7 @@ def create_many_related_manager(superclass, rel): instance=self.instance, reverse=self.reverse, model=self.model, pk_set=None, using=db) self.through._default_manager.using(db).filter(**{ - source_field_name: self._pk_val + source_field_name: self._fk_val }).delete() if self.reverse or source_field_name == self.source_field_name: # Don't send the signal when we are clearing the @@ -1026,11 +1055,6 @@ class ForeignKey(RelatedField, Field): def contribute_to_class(self, cls, name): super(ForeignKey, self).contribute_to_class(cls, name) setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self)) - if isinstance(self.rel.to, six.string_types): - target = self.rel.to - else: - target = self.rel.to._meta.db_table - cls._meta.duplicate_targets[self.column] = (target, "o2m") def contribute_to_related_class(self, cls, related): # Internal FK's - i.e., those with a related name ending with '+' - @@ -1269,12 +1293,6 @@ class ManyToManyField(RelatedField, Field): field.rel.through = model add_lazy_relation(cls, self, self.rel.through, resolve_through_model) - if isinstance(self.rel.to, six.string_types): - target = self.rel.to - else: - target = self.rel.to._meta.db_table - cls._meta.duplicate_targets[self.column] = (target, "m2m") - def contribute_to_related_class(self, cls, related): # Internal M2Ms (i.e., those with a related name ending with '+') # and swapped models don't get a related descriptor. diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 6ae37505a5..a36608e3e1 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -5,6 +5,7 @@ from django.core.exceptions import ImproperlyConfigured from django.utils.datastructures import SortedDict from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule +from django.utils._os import upath from django.utils import six import imp @@ -158,7 +159,7 @@ class AppCache(object): def get_models(self, app_mod=None, include_auto_created=False, include_deferred=False, - only_installed=True): + only_installed=True, include_swapped=False): """ Given a module containing models, returns a list of the models. Otherwise returns a list of all installed models. @@ -170,8 +171,16 @@ class AppCache(object): By default, models created to satisfy deferred attribute queries are *not* included in the list of models. However, if you specify include_deferred, they will be. + + By default, models that aren't part of installed apps will *not* + be included in the list of models. However, if you specify + only_installed=False, they will be. + + By default, models that have been swapped out will *not* be + included in the list of models. However, if you specify + include_swapped, they will be. 
""" - cache_key = (app_mod, include_auto_created, include_deferred, only_installed) + cache_key = (app_mod, include_auto_created, include_deferred, only_installed, include_swapped) try: return self._get_models_cache[cache_key] except KeyError: @@ -194,7 +203,8 @@ class AppCache(object): model_list.extend( model for model in app.values() if ((not model._deferred or include_deferred) and - (not model._meta.auto_created or include_auto_created)) + (not model._meta.auto_created or include_auto_created) and + (not model._meta.swapped or include_swapped)) ) self._get_models_cache[cache_key] = model_list return model_list @@ -226,8 +236,8 @@ class AppCache(object): # The same model may be imported via different paths (e.g. # appname.models and project.appname.models). We use the source # filename as a means to detect identity. - fname1 = os.path.abspath(sys.modules[model.__module__].__file__) - fname2 = os.path.abspath(sys.modules[model_dict[model_name].__module__].__file__) + fname1 = os.path.abspath(upath(sys.modules[model.__module__].__file__)) + fname2 = os.path.abspath(upath(sys.modules[model_dict[model_name].__module__].__file__)) # Since the filename extension could be .py the first time and # .pyc or .pyo the second time, ignore the extension when # comparing. diff --git a/django/db/models/options.py b/django/db/models/options.py index ace2816b0b..e3b4b7ba1b 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -9,11 +9,10 @@ from django.db.models.fields.related import ManyToManyRel from django.db.models.fields import AutoField, FieldDoesNotExist from django.db.models.fields.proxy import OrderWrt from django.db.models.loading import get_models, app_cache_ready -from django.utils.translation import activate, deactivate_all, get_language, string_concat -from django.utils.encoding import force_text, smart_text -from django.utils.datastructures import SortedDict from django.utils import six -from django.utils.encoding import python_2_unicode_compatible +from django.utils.datastructures import SortedDict +from django.utils.encoding import force_text, smart_text, python_2_unicode_compatible +from django.utils.translation import activate, deactivate_all, get_language, string_concat # Calculate the verbose_name by converting from InitialCaps to "lowercase with spaces". 
get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', ' \\1', class_name).lower().strip() @@ -21,7 +20,7 @@ get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]| DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering', 'unique_together', 'permissions', 'get_latest_by', 'order_with_respect_to', 'app_label', 'db_tablespace', - 'abstract', 'managed', 'proxy', 'swappable', 'auto_created', 'auto_register') + 'abstract', 'managed', 'proxy', 'swappable', 'auto_created', 'index_together', 'auto_register') @python_2_unicode_compatible @@ -34,6 +33,7 @@ class Options(object): self.db_table = '' self.ordering = [] self.unique_together = [] + self.index_together = [] self.permissions = [] self.object_name, self.app_label = None, app_label self.get_latest_by = None @@ -58,7 +58,6 @@ class Options(object): self.concrete_model = None self.swappable = None self.parents = SortedDict() - self.duplicate_targets = {} self.auto_created = False # To handle various inheritance situations, we need to track where @@ -78,6 +77,7 @@ class Options(object): from django.db.backends.util import truncate_name cls._meta = self + self.model = cls self.installed = re.sub('\.models$', '', cls.__module__) in settings.INSTALLED_APPS # First, construct the default values for these options. self.object_name = cls.__name__ @@ -150,24 +150,6 @@ class Options(object): auto_created=True) model.add_to_class('id', auto) - # Determine any sets of fields that are pointing to the same targets - # (e.g. two ForeignKeys to the same remote model). The query - # construction code needs to know this. At the end of this, - # self.duplicate_targets will map each duplicate field column to the - # columns it duplicates. - collections = {} - for column, target in six.iteritems(self.duplicate_targets): - try: - collections[target].add(column) - except KeyError: - collections[target] = set([column]) - self.duplicate_targets = {} - for elt in six.itervalues(collections): - if len(elt) == 1: - continue - for column in elt: - self.duplicate_targets[column] = elt.difference(set([column])) - def add_field(self, field): # Insert the given field in the order in which it was created, using # the "creation_counter" attribute of the field. @@ -195,6 +177,12 @@ class Options(object): self.pk = field field.serialize = False + def pk_index(self): + """ + Returns the index of the primary key field in the self.fields list. + """ + return self.fields.index(self.pk) + def setup_proxy(self, target): """ Does the internal setup so that the current model is a proxy for @@ -481,7 +469,7 @@ class Options(object): a granparent or even more distant relation. """ if not self.parents: - return + return None if model in self.parents: return [model] for parent in self.parents: @@ -489,8 +477,7 @@ class Options(object): if res: res.insert(0, parent) return res - raise TypeError('%r is not an ancestor of this model' - % model._meta.module_name) + return None def get_parent_list(self): """ @@ -522,22 +509,3 @@ class Options(object): # of the chain to the ancestor is that parent # links return self.parents[parent] or parent_link - - def get_ordered_objects(self): - "Returns a list of Options objects that are ordered with respect to this object." 
- if not hasattr(self, '_ordered_objects'): - objects = [] - # TODO - #for klass in get_models(get_app(self.app_label)): - # opts = klass._meta - # if opts.order_with_respect_to and opts.order_with_respect_to.rel \ - # and self == opts.order_with_respect_to.rel.to._meta: - # objects.append(opts) - self._ordered_objects = objects - return self._ordered_objects - - def pk_index(self): - """ - Returns the index of the primary key field in the self.fields list. - """ - return self.fields.index(self.pk) diff --git a/django/db/models/query.py b/django/db/models/query.py index dc1ddf1606..f56d5d2842 100644 --- a/django/db/models/query.py +++ b/django/db/models/query.py @@ -5,6 +5,7 @@ The main QuerySet implementation. This provides the public API for the ORM. import copy import itertools import sys +import warnings from django.core import exceptions from django.db import connections, router, transaction, IntegrityError @@ -135,7 +136,9 @@ class QuerySet(object): except StopIteration: return False return True - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) def __contains__(self, val): # The 'in' operator works without this method, due to __iter__. This @@ -698,6 +701,9 @@ class QuerySet(object): If fields are specified, they must be ForeignKey fields and only those related objects are included in the selection. """ + if 'depth' in kwargs: + warnings.warn('The "depth" keyword argument has been deprecated.\n' + 'Use related field names instead.', PendingDeprecationWarning) depth = kwargs.pop('depth', 0) if kwargs: raise TypeError('Unexpected keyword arguments to select_related: %s' @@ -1071,7 +1077,7 @@ class ValuesQuerySet(QuerySet): def _as_sql(self, connection): """ - For ValueQuerySet (and subclasses like ValuesListQuerySet), they can + For ValuesQuerySet (and subclasses like ValuesListQuerySet), they can only be used as nested queries if they're already set up to select only a single field (in which case, that is the field column that is returned). This differs from QuerySet.as_sql(), where the column to @@ -1294,7 +1300,7 @@ class EmptyQuerySet(QuerySet): value_annotation = False def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None, - only_load=None, local_only=False): + only_load=None, from_parent=None): """ Helper function that recursively returns an information for a klass, to be used in get_cached_row. It exists just to compute this information only @@ -1314,8 +1320,10 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None, * only_load - if the query has had only() or defer() applied, this is the list of field names that will be returned. If None, the full field list for `klass` can be assumed. - * local_only - Only populate local fields. This is used when - following reverse select-related relations + * from_parent - the parent model used to get to this model + + Note that when travelling from parent to child, we will only load child + fields which aren't in the parent. """ if max_depth and requested is None and cur_depth > max_depth: # We've recursed deeply enough; stop now. @@ -1341,7 +1349,9 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None, for field, model in klass._meta.get_fields_with_model(): if field.name not in load_fields: skip.add(field.attname) - elif local_only and model is not None: + elif from_parent and issubclass(from_parent, model.__class__): + # Avoid loading fields already loaded for parent model for + # child models. 
continue else: init_list.append(field.attname) @@ -1355,16 +1365,22 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None, else: # Load all fields on klass - # We trying to not populate field_names variable for perfomance reason. - # If field_names variable is set, it is used to instantiate desired fields, - # by passing **dict(zip(field_names, fields)) as kwargs to Model.__init__ method. - # But kwargs version of Model.__init__ is slower, so we should avoid using - # it when it is not really neccesary. - if local_only and len(klass._meta.local_fields) != len(klass._meta.fields): - field_count = len(klass._meta.local_fields) - field_names = [f.attname for f in klass._meta.local_fields] - else: - field_count = len(klass._meta.fields) + field_count = len(klass._meta.fields) + # Check if we need to skip some parent fields. + if from_parent and len(klass._meta.local_fields) != len(klass._meta.fields): + # Only load those fields which haven't been already loaded into + # 'from_parent'. + non_seen_models = [p for p in klass._meta.get_parent_list() + if not issubclass(from_parent, p)] + # Load local fields, too... + non_seen_models.append(klass) + field_names = [f.attname for f in klass._meta.fields + if f.model in non_seen_models] + field_count = len(field_names) + # Try to avoid populating field_names variable for perfomance reasons. + # If field_names variable is set, we use **kwargs based model init + # which is slower than normal init. + if field_count == len(klass._meta.fields): field_names = () restricted = requested is not None @@ -1386,14 +1402,20 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None, if o.field.unique and select_related_descend(o.field, restricted, requested, only_load.get(o.model), reverse=True): next = requested[o.field.related_query_name()] + parent = klass if issubclass(o.model, klass) else None klass_info = get_klass_info(o.model, max_depth=max_depth, cur_depth=cur_depth+1, - requested=next, only_load=only_load, local_only=True) + requested=next, only_load=only_load, from_parent=parent) reverse_related_fields.append((o.field, klass_info)) + if field_names: + pk_idx = field_names.index(klass._meta.pk.attname) + else: + pk_idx = klass._meta.pk_index() - return klass, field_names, field_count, related_fields, reverse_related_fields + return klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx -def get_cached_row(row, index_start, using, klass_info, offset=0): +def get_cached_row(row, index_start, using, klass_info, offset=0, + parent_data=()): """ Helper function that recursively returns an object with the specified related attributes already populated. @@ -1408,25 +1430,29 @@ def get_cached_row(row, index_start, using, klass_info, offset=0): * offset - the number of additional fields that are known to exist in row for `klass`. This usually means the number of annotated results on `klass`. - * using - the database alias on which the query is being executed. + * using - the database alias on which the query is being executed. * klass_info - result of the get_klass_info function + * parent_data - parent model data in format (field, value). Used + to populate the non-local fields of child models. 
""" if klass_info is None: return None - klass, field_names, field_count, related_fields, reverse_related_fields = klass_info + klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx = klass_info + fields = row[index_start : index_start + field_count] - # If all the select_related columns are None, then the related + # If the pk column is None (or the Oracle equivalent ''), then the related # object must be non-existent - set the relation to None. - # Otherwise, construct the related object. - if fields == (None,) * field_count: + if fields[pk_idx] == None or fields[pk_idx] == '': obj = None + elif field_names: + fields = list(fields) + for rel_field, value in parent_data: + field_names.append(rel_field.attname) + fields.append(value) + obj = klass(**dict(zip(field_names, fields))) else: - if field_names: - obj = klass(**dict(zip(field_names, fields))) - else: - obj = klass(*fields) - + obj = klass(*fields) # If an object was retrieved, set the database state. if obj: obj._state.db = using @@ -1456,34 +1482,35 @@ def get_cached_row(row, index_start, using, klass_info, offset=0): # Only handle the restricted case - i.e., don't do a depth # descent into reverse relations unless explicitly requested for f, klass_info in reverse_related_fields: + # Transfer data from this object to childs. + parent_data = [] + for rel_field, rel_model in klass_info[0]._meta.get_fields_with_model(): + if rel_model is not None and isinstance(obj, rel_model): + parent_data.append((rel_field, getattr(obj, rel_field.attname))) # Recursively retrieve the data for the related object - cached_row = get_cached_row(row, index_end, using, klass_info) + cached_row = get_cached_row(row, index_end, using, klass_info, + parent_data=parent_data) # If the recursive descent found an object, populate the # descriptor caches relevant to the object if cached_row: rel_obj, index_end = cached_row if obj is not None: - # If the field is unique, populate the - # reverse descriptor cache + # populate the reverse descriptor cache setattr(obj, f.related.get_cache_name(), rel_obj) if rel_obj is not None: # If the related object exists, populate # the descriptor cache. setattr(rel_obj, f.get_cache_name(), obj) - # Now populate all the non-local field values - # on the related object - for rel_field, rel_model in rel_obj._meta.get_fields_with_model(): - if rel_model is not None: + # Populate related object caches using parent data. 
+ for rel_field, _ in parent_data: + if rel_field.rel: setattr(rel_obj, rel_field.attname, getattr(obj, rel_field.attname)) - # populate the field cache for any related object - # that has already been retrieved - if rel_field.rel: - try: - cached_obj = getattr(obj, rel_field.get_cache_name()) - setattr(rel_obj, rel_field.get_cache_name(), cached_obj) - except AttributeError: - # Related object hasn't been cached yet - pass + try: + cached_obj = getattr(obj, rel_field.get_cache_name()) + setattr(rel_obj, rel_field.get_cache_name(), cached_obj) + except AttributeError: + # Related object hasn't been cached yet + pass return obj, index_end diff --git a/django/db/models/signals.py b/django/db/models/signals.py index 4666169bec..2ef54a7ca7 100644 --- a/django/db/models/signals.py +++ b/django/db/models/signals.py @@ -2,15 +2,16 @@ from django.dispatch import Signal class_prepared = Signal(providing_args=["class"]) -pre_init = Signal(providing_args=["instance", "args", "kwargs"]) -post_init = Signal(providing_args=["instance"]) +pre_init = Signal(providing_args=["instance", "args", "kwargs"], use_caching=True) +post_init = Signal(providing_args=["instance"], use_caching=True) -pre_save = Signal(providing_args=["instance", "raw", "using", "update_fields"]) -post_save = Signal(providing_args=["instance", "raw", "created", "using", "update_fields"]) +pre_save = Signal(providing_args=["instance", "raw", "using", "update_fields"], + use_caching=True) +post_save = Signal(providing_args=["instance", "raw", "created", "using", "update_fields"], use_caching=True) -pre_delete = Signal(providing_args=["instance", "using"]) -post_delete = Signal(providing_args=["instance", "using"]) +pre_delete = Signal(providing_args=["instance", "using"], use_caching=True) +post_delete = Signal(providing_args=["instance", "using"], use_caching=True) -post_syncdb = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive"]) +post_syncdb = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive"], use_caching=True) -m2m_changed = Signal(providing_args=["action", "instance", "reverse", "model", "pk_set", "using"]) +m2m_changed = Signal(providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True) diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py index b9095e503a..4d846fb438 100644 --- a/django/db/models/sql/compiler.py +++ b/django/db/models/sql/compiler.py @@ -6,7 +6,7 @@ from django.db.backends.util import truncate_name from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import select_related_descend from django.db.models.sql.constants import (SINGLE, MULTI, ORDER_DIR, - GET_ITERATOR_CHUNK_SIZE) + GET_ITERATOR_CHUNK_SIZE, REUSE_ALL, SelectInfo) from django.db.models.sql.datastructures import EmptyResultSet from django.db.models.sql.expressions import SQLEvaluator from django.db.models.sql.query import get_order_dir, Query @@ -103,21 +103,12 @@ class SQLCompiler(object): result.append('WHERE %s' % where) params.extend(w_params) - grouping, gb_params = self.get_grouping() + grouping, gb_params = self.get_grouping(ordering_group_by) if grouping: if distinct_fields: raise NotImplementedError( "annotate() + distinct(fields) not implemented.") - if ordering: - # If the backend can't group by PK (i.e., any database - # other than MySQL), then any fields mentioned in the - # ordering clause needs to be in the group by clause. 
- if not self.connection.features.allows_group_by_pk: - for col, col_params in ordering_group_by: - if col not in grouping: - grouping.append(str(col)) - gb_params.extend(col_params) - else: + if not ordering: ordering = self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) params.extend(gb_params) @@ -188,7 +179,7 @@ class SQLCompiler(object): col_aliases = set() if self.query.select: only_load = self.deferred_to_columns() - for col in self.query.select: + for col, _ in self.query.select: if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias].table_name @@ -233,7 +224,7 @@ class SQLCompiler(object): for alias, aggregate in self.query.aggregate_select.items() ]) - for table, col in self.query.related_select_cols: + for (table, col), _ in self.query.related_select_cols: r = '%s.%s' % (qn(table), qn(col)) if with_aliases and col in col_aliases: c_alias = 'Col%d' % len(col_aliases) @@ -249,7 +240,7 @@ class SQLCompiler(object): return result def get_default_columns(self, with_aliases=False, col_aliases=None, - start_alias=None, opts=None, as_pairs=False, local_only=False): + start_alias=None, opts=None, as_pairs=False, from_parent=None): """ Computes the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via @@ -274,7 +265,8 @@ class SQLCompiler(object): if start_alias: seen = {None: start_alias} for field, model in opts.get_fields_with_model(): - if local_only and model is not None: + if from_parent and model is not None and issubclass(from_parent, model): + # Avoid loading data for already loaded parents. continue if start_alias: try: @@ -282,7 +274,8 @@ class SQLCompiler(object): except KeyError: link_field = opts.get_ancestor_link(model) alias = self.query.join((start_alias, model._meta.db_table, - link_field.column, model._meta.pk.column)) + link_field.column, model._meta.pk.column), + join_field=link_field) seen[model] = alias else: # If we're starting from the base model of the queryset, the @@ -378,7 +371,7 @@ class SQLCompiler(object): else: order = asc result.append('%s %s' % (field, order)) - group_by.append((field, [])) + group_by.append((str(field), [])) continue col, order = get_order_dir(field, asc) if col in self.query.aggregate_select: @@ -456,8 +449,8 @@ class SQLCompiler(object): """ if not alias: alias = self.query.get_initial_alias() - field, target, opts, joins, _, _ = self.query.setup_joins(pieces, - opts, alias, False) + field, target, opts, joins, _ = self.query.setup_joins( + pieces, opts, alias, REUSE_ALL) # We will later on need to promote those joins that were added to the # query afresh above. joins_to_promote = [j for j in joins if self.query.alias_refcount[j] < 2] @@ -509,20 +502,27 @@ class SQLCompiler(object): qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name first = True + from_params = [] for alias in self.query.tables: if not self.query.alias_refcount[alias]: continue try: - name, alias, join_type, lhs, lhs_col, col, nullable = self.query.alias_map[alias] + name, alias, join_type, lhs, lhs_col, col, _, join_field = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. 
continue alias_str = (alias != name and ' %s' % alias or '') if join_type and not first: - result.append('%s %s%s ON (%s.%s = %s.%s)' - % (join_type, qn(name), alias_str, qn(lhs), - qn2(lhs_col), qn(alias), qn2(col))) + if join_field and hasattr(join_field, 'get_extra_join_sql'): + extra_cond, extra_params = join_field.get_extra_join_sql( + self.connection, qn, lhs, alias) + from_params.extend(extra_params) + else: + extra_cond = "" + result.append('%s %s%s ON (%s.%s = %s.%s%s)' % + (join_type, qn(name), alias_str, qn(lhs), + qn2(lhs_col), qn(alias), qn2(col), extra_cond)) else: connector = not first and ', ' or '' result.append('%s%s%s' % (connector, qn(name), alias_str)) @@ -536,45 +536,58 @@ class SQLCompiler(object): connector = not first and ', ' or '' result.append('%s%s' % (connector, qn(alias))) first = False - return result, [] + return result, from_params - def get_grouping(self): + def get_grouping(self, ordering_group_by): """ Returns a tuple representing the SQL elements in the "group by" clause. """ qn = self.quote_name_unless_alias result, params = [], [] if self.query.group_by is not None: - if (len(self.query.model._meta.fields) == len(self.query.select) and - self.connection.features.allows_group_by_pk): + select_cols = self.query.select + self.query.related_select_cols + # Just the column, not the fields. + select_cols = [s[0] for s in select_cols] + if (len(self.query.model._meta.fields) == len(self.query.select) + and self.connection.features.allows_group_by_pk): self.query.group_by = [ (self.query.model._meta.db_table, self.query.model._meta.pk.column) ] - - group_by = self.query.group_by or [] - - extra_selects = [] - for extra_select, extra_params in six.itervalues(self.query.extra_select): - extra_selects.append(extra_select) - params.extend(extra_params) - cols = (group_by + self.query.select + - self.query.related_select_cols + extra_selects) + select_cols = [] seen = set() + cols = self.query.group_by + select_cols for col in cols: - if col in seen: - continue - seen.add(col) if isinstance(col, (list, tuple)): - result.append('%s.%s' % (qn(col[0]), qn(col[1]))) + sql = '%s.%s' % (qn(col[0]), qn(col[1])) elif hasattr(col, 'as_sql'): - result.append(col.as_sql(qn, self.connection)) + sql = col.as_sql(qn, self.connection) else: - result.append('(%s)' % str(col)) + sql = '(%s)' % str(col) + if sql not in seen: + result.append(sql) + seen.add(sql) + + # Still, we need to add all stuff in ordering (except if the backend can + # group by just by PK). + if ordering_group_by and not self.connection.features.allows_group_by_pk: + for order, order_params in ordering_group_by: + # Even if we have seen the same SQL string, it might have + # different params, so, we add same SQL in "has params" case. + if order not in seen or params: + result.append(order) + params.extend(order_params) + seen.add(order) + + # Unconditionally add the extra_select items. + for extra_select, extra_params in self.query.extra_select.values(): + sql = '(%s)' % str(extra_select) + result.append(sql) + params.extend(extra_params) + return result, params def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1, - used=None, requested=None, restricted=None, nullable=None, - dupe_set=None, avoid_set=None): + requested=None, restricted=None, nullable=None): """ Fill in the information needed for a select_related query. 
The current depth is measured as the number of connections away from the root model @@ -589,14 +602,6 @@ class SQLCompiler(object): opts = self.query.get_meta() root_alias = self.query.get_initial_alias() self.query.related_select_cols = [] - self.query.related_select_fields = [] - if not used: - used = set() - if dupe_set is None: - dupe_set = set() - if avoid_set is None: - avoid_set = set() - orig_dupe_set = dupe_set only_load = self.query.get_loaded_field_names() # Setup for the case when only particular related fields should be @@ -616,12 +621,6 @@ class SQLCompiler(object): if not select_related_descend(f, restricted, requested, only_load.get(field_model)): continue - # The "avoid" set is aliases we want to avoid just for this - # particular branch of the recursion. They aren't permanently - # forbidden from reuse in the related selection tables (which is - # what "used" specifies). - avoid = avoid_set.copy() - dupe_set = orig_dupe_set.copy() table = f.rel.to._meta.db_table promote = nullable or f.null if model: @@ -637,44 +636,28 @@ class SQLCompiler(object): int_opts = int_model._meta continue lhs_col = int_opts.parents[int_model].column - dedupe = lhs_col in opts.duplicate_targets - if dedupe: - avoid.update(self.query.dupe_avoidance.get((id(opts), lhs_col), - ())) - dupe_set.add((opts, lhs_col)) int_opts = int_model._meta alias = self.query.join((alias, int_opts.db_table, lhs_col, - int_opts.pk.column), exclusions=used, + int_opts.pk.column), promote=promote) alias_chain.append(alias) - for (dupe_opts, dupe_col) in dupe_set: - self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias) else: alias = root_alias - dedupe = f.column in opts.duplicate_targets - if dupe_set or dedupe: - avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ())) - if dedupe: - dupe_set.add((opts, f.column)) - alias = self.query.join((alias, table, f.column, f.rel.get_related_field().column), - exclusions=used.union(avoid), promote=promote) - used.add(alias) + promote=promote, join_field=f) columns, aliases = self.get_default_columns(start_alias=alias, opts=f.rel.to._meta, as_pairs=True) - self.query.related_select_cols.extend(columns) - self.query.related_select_fields.extend(f.rel.to._meta.fields) + self.query.related_select_cols.extend( + SelectInfo(col, field) for col, field in zip(columns, f.rel.to._meta.fields)) if restricted: next = requested.get(f.name, {}) else: next = False new_nullable = f.null or promote - for dupe_opts, dupe_col in dupe_set: - self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias) self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1, - used, next, restricted, new_nullable, dupe_set, avoid) + next, restricted, new_nullable) if restricted: related_fields = [ @@ -686,14 +669,8 @@ class SQLCompiler(object): if not select_related_descend(f, restricted, requested, only_load.get(model), reverse=True): continue - # The "avoid" set is aliases we want to avoid just for this - # particular branch of the recursion. They aren't permanently - # forbidden from reuse in the related selection tables (which is - # what "used" specifies). 
- avoid = avoid_set.copy() - dupe_set = orig_dupe_set.copy() - table = model._meta.db_table + table = model._meta.db_table int_opts = opts alias = root_alias alias_chain = [] @@ -708,42 +685,30 @@ class SQLCompiler(object): int_opts = int_model._meta continue lhs_col = int_opts.parents[int_model].column - dedupe = lhs_col in opts.duplicate_targets - if dedupe: - avoid.update((self.query.dupe_avoidance.get(id(opts), lhs_col), - ())) - dupe_set.add((opts, lhs_col)) int_opts = int_model._meta alias = self.query.join( (alias, int_opts.db_table, lhs_col, int_opts.pk.column), - exclusions=used, promote=True, reuse=used + promote=True, ) alias_chain.append(alias) - for dupe_opts, dupe_col in dupe_set: - self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias) - dedupe = f.column in opts.duplicate_targets - if dupe_set or dedupe: - avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ())) - if dedupe: - dupe_set.add((opts, f.column)) alias = self.query.join( (alias, table, f.rel.get_related_field().column, f.column), - exclusions=used.union(avoid), - promote=True + promote=True, join_field=f ) - used.add(alias) + from_parent = (opts.model if issubclass(model, opts.model) + else None) columns, aliases = self.get_default_columns(start_alias=alias, - opts=model._meta, as_pairs=True, local_only=True) - self.query.related_select_cols.extend(columns) - self.query.related_select_fields.extend(model._meta.fields) - + opts=model._meta, as_pairs=True, from_parent=from_parent) + self.query.related_select_cols.extend( + SelectInfo(col, field) for col, field + in zip(columns, model._meta.fields)) next = requested.get(f.related_query_name(), {}) # Use True here because we are looking at the _reverse_ side of # the relation, which is always nullable. new_nullable = True self.fill_related_selections(model._meta, table, cur_depth+1, - used, next, restricted, new_nullable) + next, restricted, new_nullable) def deferred_to_columns(self): """ @@ -772,7 +737,7 @@ class SQLCompiler(object): if resolve_columns: if fields is None: # We only set this up here because - # related_select_fields isn't populated until + # related_select_cols isn't populated until # execute_sql() has been called. # We also include types of fields of related models that @@ -782,11 +747,11 @@ class SQLCompiler(object): # This code duplicates the logic for the order of fields # found in get_columns(). It would be nice to clean this up. - if self.query.select_fields: - fields = self.query.select_fields + if self.query.select: + fields = [f.field for f in self.query.select] else: fields = self.query.model._meta.fields - fields = fields + self.query.related_select_fields + fields = fields + [f.field for f in self.query.related_select_cols] # If the field was deferred, exclude it from being passed # into `resolve_columns` because it wasn't selected. 
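As a standalone illustration (made-up values, not part of the patch), the SelectInfo pairs that query.select and related_select_cols now hold look like this; the namedtuple itself is defined in sql/constants.py below. Keeping each column together with its field is what lets the resolve_columns branch above rebuild the field list as [f.field for f in self.query.select] instead of consulting a separate related_select_fields attribute.

    from collections import namedtuple

    # Same shape as django.db.models.sql.constants.SelectInfo in this patch.
    SelectInfo = namedtuple('SelectInfo', 'col field')

    # col is an (alias, column) pair; field is the corresponding Field
    # instance in real queries (None stands in for it here).
    select = [
        SelectInfo(col=('myapp_author', 'id'), field=None),
        SelectInfo(col=('myapp_author', 'name'), field=None),
    ]
    cols = [s.col for s in select]
    fields = [s.field for s in select]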
@@ -902,6 +867,8 @@ class SQLInsertCompiler(SQLCompiler): [self.placeholder(field, v) for field, v in zip(fields, val)] for val in values ] + # Oracle Spatial needs to remove some values due to #10888 + params = self.connection.ops.modify_insert_params(placeholders, params) if self.return_id and self.connection.features.can_return_id_from_insert: params = params[0] col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column)) diff --git a/django/db/models/sql/constants.py b/django/db/models/sql/constants.py index f750310624..1c34f70169 100644 --- a/django/db/models/sql/constants.py +++ b/django/db/models/sql/constants.py @@ -18,12 +18,22 @@ QUERY_TERMS = set([ # Larger values are slightly faster at the expense of more storage space. GET_ITERATOR_CHUNK_SIZE = 100 -# Constants to make looking up tuple values clearer. +# Namedtuples for sql.* internal use. + # Join lists (indexes into the tuples that are values in the alias_map # dictionary in the Query class). JoinInfo = namedtuple('JoinInfo', 'table_name rhs_alias join_type lhs_alias ' - 'lhs_join_col rhs_join_col nullable') + 'lhs_join_col rhs_join_col nullable join_field') + +# PathInfo is used when converting lookups (fk__somecol). The contents +# describe the join in Model terms (model Options and Fields for both +# sides of the join. The rel_field is the field we are joining along. +PathInfo = namedtuple('PathInfo', + 'from_field to_field from_opts to_opts join_field') + +# Pairs of column clauses to select, and (possibly None) field for the clause. +SelectInfo = namedtuple('SelectInfo', 'col field') # How many results to expect from a cursor.execute call MULTI = 'multi' @@ -34,3 +44,6 @@ ORDER_DIR = { 'ASC': ('ASC', 'DESC'), 'DESC': ('DESC', 'ASC'), } + +# A marker for join-reusability. +REUSE_ALL = object() diff --git a/django/db/models/sql/expressions.py b/django/db/models/sql/expressions.py index 374509914d..af7e45e74e 100644 --- a/django/db/models/sql/expressions.py +++ b/django/db/models/sql/expressions.py @@ -1,14 +1,16 @@ from django.core.exceptions import FieldError from django.db.models.constants import LOOKUP_SEP from django.db.models.fields import FieldDoesNotExist +from django.db.models.sql.constants import REUSE_ALL class SQLEvaluator(object): - def __init__(self, expression, query, allow_joins=True): + def __init__(self, expression, query, allow_joins=True, reuse=REUSE_ALL): self.expression = expression self.opts = query.get_meta() self.cols = [] self.contains_aggregate = False + self.reuse = reuse self.expression.prepare(self, query, allow_joins) def prepare(self): @@ -48,11 +50,12 @@ class SQLEvaluator(object): self.cols.append((node, query.aggregate_select[node.name])) else: try: - field, source, opts, join_list, last, _ = query.setup_joins( + field, source, opts, join_list, path = query.setup_joins( field_list, query.get_meta(), - query.get_initial_alias(), False) - col, _, join_list = query.trim_joins(source, join_list, last, False) - + query.get_initial_alias(), self.reuse) + col, _, join_list = query.trim_joins(source, join_list, path) + if self.reuse is not None and self.reuse != REUSE_ALL: + self.reuse.update(join_list) self.cols.append((node, (join_list[-1], col))) except FieldDoesNotExist: raise FieldError("Cannot resolve keyword %r into field. 
" diff --git a/django/db/models/sql/query.py b/django/db/models/sql/query.py index cef01c48ab..ff56211c5d 100644 --- a/django/db/models/sql/query.py +++ b/django/db/models/sql/query.py @@ -14,13 +14,13 @@ from django.utils.encoding import force_text from django.utils.tree import Node from django.utils import six from django.db import connections, DEFAULT_DB_ALIAS -from django.db.models import signals from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import ExpressionNode from django.db.models.fields import FieldDoesNotExist +from django.db.models.loading import get_model from django.db.models.sql import aggregates as base_aggregates_module from django.db.models.sql.constants import (QUERY_TERMS, ORDER_DIR, SINGLE, - ORDER_PATTERN, JoinInfo) + ORDER_PATTERN, REUSE_ALL, JoinInfo, SelectInfo, PathInfo) from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin from django.db.models.sql.expressions import SQLEvaluator from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode, @@ -103,7 +103,7 @@ class Query(object): def __init__(self, model, where=WhereNode): self.model = model - self.alias_refcount = SortedDict() + self.alias_refcount = {} # alias_map is the most important data structure regarding joins. # It's used for recording which joins exist in the query and what # type they are. The key is the alias of the joined table (possibly @@ -115,17 +115,19 @@ class Query(object): self.default_ordering = True self.standard_ordering = True self.ordering_aliases = [] - self.related_select_fields = [] - self.dupe_avoidance = {} self.used_aliases = set() self.filter_is_sticky = False self.included_inherited_models = {} # SQL-related attributes + # Select and related select clauses as SelectInfo instances. + # The select is used for cases where we want to set up the select + # clause to contain other than default fields (values(), annotate(), + # subqueries...) self.select = [] - # For each to-be-selected field in self.select there must be a - # corresponding entry in self.select - git seems to need this. - self.select_fields = [] + # The related_select_cols is used for columns needed for + # select_related - this is populated in compile stage. + self.related_select_cols = [] self.tables = [] # Aliases in the order they are created. self.where = where() self.where_class = where @@ -138,7 +140,6 @@ class Query(object): self.select_for_update = False self.select_for_update_nowait = False self.select_related = False - self.related_select_cols = [] # SQL aggregate-related attributes self.aggregates = SortedDict() # Maps alias -> SQL aggregate function @@ -191,16 +192,25 @@ class Query(object): Pickling support. """ obj_dict = self.__dict__.copy() - obj_dict['related_select_fields'] = [] obj_dict['related_select_cols'] = [] # Fields can't be pickled, so if a field list has been # specified, we pickle the list of field names instead. # None is also a possible value; that can pass as-is - obj_dict['select_fields'] = [ - f is not None and f.name or None - for f in obj_dict['select_fields'] + obj_dict['select'] = [ + (s.col, s.field is not None and s.field.name or None) + for s in obj_dict['select'] ] + # alias_map can also contain references to fields. 
+ new_alias_map = {} + for alias, join_info in obj_dict['alias_map'].items(): + if join_info.join_field is None: + new_alias_map[alias] = join_info + else: + model = join_info.join_field.model._meta + field_id = (model.app_label, model.object_name, join_info.join_field.name) + new_alias_map[alias] = join_info._replace(join_field=field_id) + obj_dict['alias_map'] = new_alias_map return obj_dict def __setstate__(self, obj_dict): @@ -209,10 +219,19 @@ class Query(object): """ # Rebuild list of field instances opts = obj_dict['model']._meta - obj_dict['select_fields'] = [ - name is not None and opts.get_field(name) or None - for name in obj_dict['select_fields'] + obj_dict['select'] = [ + SelectInfo(tpl[0], tpl[1] is not None and opts.get_field(tpl[1]) or None) + for tpl in obj_dict['select'] ] + new_alias_map = {} + for alias, join_info in obj_dict['alias_map'].items(): + if join_info.join_field is None: + new_alias_map[alias] = join_info + else: + field_id = join_info.join_field + new_alias_map[alias] = join_info._replace( + join_field=get_model(field_id[0], field_id[1])._meta.get_field(field_id[2])) + obj_dict['alias_map'] = new_alias_map self.__dict__.update(obj_dict) @@ -256,10 +275,8 @@ class Query(object): obj.standard_ordering = self.standard_ordering obj.included_inherited_models = self.included_inherited_models.copy() obj.ordering_aliases = [] - obj.select_fields = self.select_fields[:] - obj.related_select_fields = self.related_select_fields[:] - obj.dupe_avoidance = self.dupe_avoidance.copy() obj.select = self.select[:] + obj.related_select_cols = [] obj.tables = self.tables[:] obj.where = copy.deepcopy(self.where, memo=memo) obj.where_class = self.where_class @@ -275,7 +292,6 @@ class Query(object): obj.select_for_update = self.select_for_update obj.select_for_update_nowait = self.select_for_update_nowait obj.select_related = self.select_related - obj.related_select_cols = [] obj.aggregates = copy.deepcopy(self.aggregates, memo=memo) if self.aggregate_select_mask is None: obj.aggregate_select_mask = None @@ -384,7 +400,6 @@ class Query(object): query.select_for_update = False query.select_related = False query.related_select_cols = [] - query.related_select_fields = [] result = query.get_compiler(using).execute_sql(SINGLE) if result is None: @@ -462,24 +477,47 @@ class Query(object): self.remove_inherited_models() # Work out how to relabel the rhs aliases, if necessary. change_map = {} - used = set() conjunction = (connector == AND) - # Add the joins in the rhs query into the new query. - first = True - for alias in rhs.tables: - if not rhs.alias_refcount[alias]: - # An unused alias. - continue - table, _, join_type, lhs, lhs_col, col, _ = rhs.alias_map[alias] - promote = join_type == self.LOUTER + + # Determine which existing joins can be reused. When combining the + # query with AND we must recreate all joins for m2m filters. When + # combining with OR we can reuse joins. The reason is that in AND + # case a single row can't fulfill a condition like: + # revrel__col=1 & revrel__col=2 + # But, there might be two different related rows matching this + # condition. In OR case a single True is enough, so single row is + # enough, too. + # + # Note that we will be creating duplicate joins for non-m2m joins in + # the AND case. The results will be correct but this creates too many + # joins. This is something that could be fixed later on. + reuse = set() if conjunction else set(self.tables) + # Base table must be present in the query - this is the same + # table on both sides. 
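As a rough illustration of the reuse set above, assuming hypothetical Blog and Entry models related by a reverse foreign key:

a = Blog.objects.filter(entry__rating=1)
b = Blog.objects.filter(entry__rating=2)
# OR: one matching Entry row is enough, so when the filters from `b` are
# merged in they may reuse the Entry join already created for `a`.
a | b
# AND: no single Entry row can have rating 1 and rating 2 at once, so the
# combined query gets a second, separate join on the Entry table.
a & b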
+ self.get_initial_alias() + # Now, add the joins from rhs query into the new query (skipping base + # table). + for alias in rhs.tables[1:]: + table, _, join_type, lhs, lhs_col, col, nullable, join_field = rhs.alias_map[alias] + promote = (join_type == self.LOUTER) # If the left side of the join was already relabeled, use the # updated alias. lhs = change_map.get(lhs, lhs) - new_alias = self.join((lhs, table, lhs_col, col), - conjunction and not first, used, promote, not conjunction) - used.add(new_alias) + new_alias = self.join( + (lhs, table, lhs_col, col), reuse=reuse, promote=promote, + outer_if_first=not conjunction, nullable=nullable, + join_field=join_field) + # We can't reuse the same join again in the query. If we have two + # distinct joins for the same connection in rhs query, then the + # combined query must have two joins, too. + reuse.discard(new_alias) change_map[alias] = new_alias - first = False + if not rhs.alias_refcount[alias]: + # The alias was unused in the rhs query. Unref it so that it + # will be unused in the new query, too. We have to add and + # unref the alias so that join promotion has information of + # the join type for the unused alias. + self.unref_alias(new_alias) # So that we don't exclude valid results in an "or" query combination, # all joins exclusive to either the lhs or the rhs must be converted @@ -527,14 +565,14 @@ class Query(object): # Selection columns and extra extensions are those provided by 'rhs'. self.select = [] - for col in rhs.select: + for col, field in rhs.select: if isinstance(col, (list, tuple)): - self.select.append((change_map.get(col[0], col[0]), col[1])) + new_col = change_map.get(col[0], col[0]), col[1] + self.select.append(SelectInfo(new_col, field)) else: item = copy.deepcopy(col) item.relabel_aliases(change_map) - self.select.append(item) - self.select_fields = rhs.select_fields[:] + self.select.append(SelectInfo(item, field)) if connector == OR: # It would be nice to be able to handle this, but the queries don't @@ -585,17 +623,22 @@ class Query(object): for name in parts[:-1]: old_model = cur_model source = opts.get_field_by_name(name)[0] - cur_model = source.rel.to + if is_reverse_o2o(source): + cur_model = source.model + else: + cur_model = source.rel.to opts = cur_model._meta # Even if we're "just passing through" this model, we must add # both the current model's pk and the related reference field - # to the things we select. - must_include[old_model].add(source) + # (if it's not a reverse relation) to the things we select. + if not is_reverse_o2o(source): + must_include[old_model].add(source) add_to_dict(must_include, cur_model, opts.pk) field, model, _, _ = opts.get_field_by_name(parts[-1]) if model is None: model = cur_model - add_to_dict(seen, model, field) + if not is_reverse_o2o(field): + add_to_dict(seen, model, field) if defer: # We need to load all fields for each model, except those that @@ -750,29 +793,30 @@ class Query(object): """ assert set(change_map.keys()).intersection(set(change_map.values())) == set() + def relabel_column(col): + if isinstance(col, (list, tuple)): + old_alias = col[0] + return (change_map.get(old_alias, old_alias), col[1]) + else: + col.relabel_aliases(change_map) + return col # 1. Update references in "select" (normal columns plus aliases), # "group by", "where" and "having". 
self.where.relabel_aliases(change_map) self.having.relabel_aliases(change_map) - for columns in [self.select, self.group_by or []]: - for pos, col in enumerate(columns): - if isinstance(col, (list, tuple)): - old_alias = col[0] - columns[pos] = (change_map.get(old_alias, old_alias), col[1]) - else: - col.relabel_aliases(change_map) - for mapping in [self.aggregates]: - for key, col in mapping.items(): - if isinstance(col, (list, tuple)): - old_alias = col[0] - mapping[key] = (change_map.get(old_alias, old_alias), col[1]) - else: - col.relabel_aliases(change_map) + if self.group_by: + self.group_by = [relabel_column(col) for col in self.group_by] + self.select = [SelectInfo(relabel_column(s.col), s.field) + for s in self.select] + self.aggregates = SortedDict( + (key, relabel_column(col)) for key, col in self.aggregates.items()) # 2. Rename the alias in the internal table/alias datastructures. - for k, aliases in self.join_map.items(): + for ident, aliases in self.join_map.items(): + del self.join_map[ident] aliases = tuple([change_map.get(a, a) for a in aliases]) - self.join_map[k] = aliases + ident = (change_map.get(ident[0], ident[0]),) + ident[1:] + self.join_map[ident] = aliases for old_alias, new_alias in six.iteritems(change_map): alias_data = self.alias_map[old_alias] alias_data = alias_data._replace(rhs_alias=new_alias) @@ -845,10 +889,10 @@ class Query(object): count. Note that after execution, the reference counts are zeroed, so tables added in compiler will not be seen by this method. """ - return len([1 for count in six.itervalues(self.alias_refcount) if count]) + return len([1 for count in self.alias_refcount.values() if count]) - def join(self, connection, always_create=False, exclusions=(), - promote=False, outer_if_first=False, nullable=False, reuse=None): + def join(self, connection, reuse=REUSE_ALL, promote=False, + outer_if_first=False, nullable=False, join_field=None): """ Returns an alias for the join in 'connection', either reusing an existing alias for that join or creating a new one. 'connection' is a @@ -858,56 +902,47 @@ class Query(object): lhs.lhs_col = table.col - If 'always_create' is True and 'reuse' is None, a new alias is always - created, regardless of whether one already exists or not. If - 'always_create' is True and 'reuse' is a set, an alias in 'reuse' that - matches the connection will be returned, if possible. If - 'always_create' is False, the first existing alias that matches the - 'connection' is returned, if any. Otherwise a new join is created. - - If 'exclusions' is specified, it is something satisfying the container - protocol ("foo in exclusions" must work) and specifies a list of - aliases that should not be returned, even if they satisfy the join. + The 'reuse' parameter can be used in three ways: it can be REUSE_ALL + which means all joins (matching the connection) are reusable, it can + be a set containing the aliases that can be reused, or it can be None + which means a new join is always created. If 'promote' is True, the join type for the alias will be LOUTER (if the alias previously existed, the join type will be promoted from INNER to LOUTER, if necessary). If 'outer_if_first' is True and a new join is created, it will have the - LOUTER join type. This is used when joining certain types of querysets - and Q-objects together. + LOUTER join type. Used for example when adding ORed filters, where we + want to use LOUTER joins except if some other join already restricts + the join to INNER join. 
A join is always created as LOUTER if the lhs alias is LOUTER to make - sure we do not generate chains like a LOUTER b INNER c. + sure we do not generate chains like t1 LOUTER t2 INNER t3. If 'nullable' is True, the join can potentially involve NULL values and is a candidate for promotion (to "left outer") when combining querysets. + + The 'join_field' is the field we are joining along (if any). """ lhs, table, lhs_col, col = connection - if lhs in self.alias_map: - lhs_table = self.alias_map[lhs].table_name + existing = self.join_map.get(connection, ()) + if reuse == REUSE_ALL: + reuse = existing + elif reuse is None: + reuse = set() else: - lhs_table = lhs - - if reuse and always_create and table in self.table_map: - # Convert the 'reuse' to case to be "exclude everything but the - # reusable set, minus exclusions, for this table". - exclusions = set(self.table_map[table]).difference(reuse).union(set(exclusions)) - always_create = False - t_ident = (lhs_table, table, lhs_col, col) - if not always_create: - for alias in self.join_map.get(t_ident, ()): - if alias not in exclusions: - if lhs_table and not self.alias_refcount[self.alias_map[alias].lhs_alias]: - # The LHS of this join tuple is no longer part of the - # query, so skip this possibility. - continue - if self.alias_map[alias].lhs_alias != lhs: - continue - self.ref_alias(alias) - if promote or (lhs and self.alias_map[lhs].join_type == self.LOUTER): - self.promote_joins([alias]) - return alias + reuse = [a for a in existing if a in reuse] + for alias in reuse: + if join_field and self.alias_map[alias].join_field != join_field: + # The join_map doesn't contain join_field (mainly because + # fields in Query structs are problematic in pickling), so + # check that the existing join is created using the same + # join_field used for the under work join. + continue + self.ref_alias(alias) + if promote or (lhs and self.alias_map[lhs].join_type == self.LOUTER): + self.promote_joins([alias]) + return alias # No reuse is possible, so we need a new alias. alias, _ = self.table_alias(table, True) @@ -918,18 +953,17 @@ class Query(object): elif (promote or outer_if_first or self.alias_map[lhs].join_type == self.LOUTER): # We need to use LOUTER join if asked by promote or outer_if_first, - # or if the LHS table is left-joined in the query. Adding inner join - # to an existing outer join effectively cancels the effect of the - # outer join. + # or if the LHS table is left-joined in the query. join_type = self.LOUTER else: join_type = self.INNER - join = JoinInfo(table, alias, join_type, lhs, lhs_col, col, nullable) + join = JoinInfo(table, alias, join_type, lhs, lhs_col, col, nullable, + join_field) self.alias_map[alias] = join - if t_ident in self.join_map: - self.join_map[t_ident] += (alias,) + if connection in self.join_map: + self.join_map[connection] += (alias,) else: - self.join_map[t_ident] = (alias,) + self.join_map[connection] = (alias,) return alias def setup_inherited_models(self): @@ -1005,11 +1039,11 @@ class Query(object): # - this is an annotation over a model field # then we need to explore the joins that are required. 
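Schematically, the three reuse modes accepted by join() behave as follows; the Blog model, table name and columns below are hypothetical, only the API is from the patch:

from django.db.models.sql.constants import REUSE_ALL
from django.db.models.sql.query import Query

query = Query(Blog)                               # hypothetical model
lhs = query.get_initial_alias()                   # alias of the base table
connection = (lhs, 'app_entry', 'id', 'blog_id')  # (lhs alias, table, lhs col, col)
first = query.join(connection, reuse=REUSE_ALL)   # reuse any matching join (none yet, so a new alias)
second = query.join(connection, reuse=None)       # never reuse: a second alias for the same connection
query.join(connection, reuse={second})            # reuse only aliases in the set, so `second` is returned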
- field, source, opts, join_list, last, _ = self.setup_joins( - field_list, opts, self.get_initial_alias(), False) + field, source, opts, join_list, path = self.setup_joins( + field_list, opts, self.get_initial_alias(), REUSE_ALL) # Process the join chain to see if it can be trimmed - col, _, join_list = self.trim_joins(source, join_list, last, False) + col, _, join_list = self.trim_joins(source, join_list, path) # If the aggregate references a model or field that requires a join, # those joins must be LEFT OUTER - empty join rows must be returned @@ -1027,8 +1061,8 @@ class Query(object): # Add the aggregate to the query aggregate.add_to_query(self, alias, col=col, source=source, is_summary=is_summary) - def add_filter(self, filter_expr, connector=AND, negate=False, trim=False, - can_reuse=None, process_extras=True, force_having=False): + def add_filter(self, filter_expr, connector=AND, negate=False, + can_reuse=None, force_having=False): """ Add a single filter to the query. The 'filter_expr' is a pair: (filter_string, value). E.g. ('name__contains', 'fred') @@ -1040,18 +1074,11 @@ class Query(object): should only happen once. So the caller is responsible for this (the caller will normally be add_q(), so that as an example). - If 'trim' is True, we automatically trim the final join group (used - internally when constructing nested queries). - If 'can_reuse' is a set, we are processing a component of a multi-component filter (e.g. filter(Q1, Q2)). In this case, 'can_reuse' will be a set of table aliases that can be reused in this filter, even if we would otherwise force the creation of new aliases for a join (needed for nested Q-filters). The set is updated by this method. - - If 'process_extras' is set, any extra filters returned from the table - joining process will be processed. This parameter is set to False - during the processing of extra filters to avoid infinite recursion. """ arg, value = filter_expr parts = arg.split(LOOKUP_SEP) @@ -1099,7 +1126,7 @@ class Query(object): value = value() elif isinstance(value, ExpressionNode): # If value is a query expression, evaluate it - value = SQLEvaluator(value, self) + value = SQLEvaluator(value, self, reuse=can_reuse) having_clause = value.contains_aggregate for alias, aggregate in self.aggregates.items(): @@ -1113,13 +1140,14 @@ class Query(object): opts = self.get_meta() alias = self.get_initial_alias() - allow_many = trim or not negate + allow_many = not negate try: - field, target, opts, join_list, last, extra_filters = self.setup_joins( - parts, opts, alias, True, allow_many, allow_explicit_fk=True, - can_reuse=can_reuse, negate=negate, - process_extras=process_extras) + field, target, opts, join_list, path = self.setup_joins( + parts, opts, alias, can_reuse, allow_many, + allow_explicit_fk=True) + if can_reuse is not None: + can_reuse.update(join_list) except MultiJoin as e: self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]), can_reuse) @@ -1137,10 +1165,10 @@ class Query(object): join_promote = True # Process the join list to see if we can remove any inner joins from - # the far end (fewer tables in a query is better). - nonnull_comparison = (lookup_type == 'isnull' and value is False) - col, alias, join_list = self.trim_joins(target, join_list, last, trim, - nonnull_comparison) + # the far end (fewer tables in a query is better). Note that join + # promotion must happen before join trimming to have the join type + # information available when reusing joins. 
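In QuerySet terms, and again with hypothetical Blog/Entry models, can_reuse is what makes a single filter() call behave differently from two chained ones:

# One call: the second condition reuses the Entry join created by the first,
# so both conditions must hold for the same Entry row.
Blog.objects.filter(entry__headline='x', entry__rating=5)
# Two calls: each call starts with a fresh reuse set, so a second Entry join
# is created and the conditions may be satisfied by different Entry rows.
Blog.objects.filter(entry__headline='x').filter(entry__rating=5)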
+ col, alias, join_list = self.trim_joins(target, join_list, path) if connector == OR: # Some joins may need to be promoted when adding a new filter to a @@ -1213,12 +1241,6 @@ class Query(object): # is added in upper layers of the code. self.where.add((Constraint(alias, col, None), 'isnull', False), AND) - if can_reuse is not None: - can_reuse.update(join_list) - if process_extras: - for filter in extra_filters: - self.add_filter(filter, negate=negate, can_reuse=can_reuse, - process_extras=False) def add_q(self, q_object, used_aliases=None, force_having=False): """ @@ -1271,37 +1293,24 @@ class Query(object): if self.filter_is_sticky: self.used_aliases = used_aliases - def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True, - allow_explicit_fk=False, can_reuse=None, negate=False, - process_extras=True): + def names_to_path(self, names, opts, allow_many=False, + allow_explicit_fk=True): """ - Compute the necessary table joins for the passage through the fields - given in 'names'. 'opts' is the Options class for the current model - (which gives the table we are joining to), 'alias' is the alias for the - table we are joining to. If dupe_multis is True, any many-to-many or - many-to-one joins will always create a new alias (necessary for - disjunctive filters). If can_reuse is not None, it's a list of aliases - that can be reused in these joins (nothing else can be reused in this - case). Finally, 'negate' is used in the same sense as for add_filter() - -- it indicates an exclude() filter, or something similar. It is only - passed in here so that it can be passed to a field's extra_filter() for - customized behavior. + Walks the names path and turns them PathInfo tuples. Note that a + single name in 'names' can generate multiple PathInfos (m2m for + example). - Returns the final field involved in the join, the target database - column (used for any 'where' constraint), the final 'opts' value and the - list of tables joined. + 'names' is the path of names to travle, 'opts' is the model Options we + start the name resolving from, 'allow_many' and 'allow_explicit_fk' + are as for setup_joins(). + + Returns a list of PathInfo tuples. In addition returns the final field + (the last used join field), and target (which is a field guaranteed to + contain the same value as the final field). """ - joins = [alias] - last = [0] - dupe_set = set() - exclusions = set() - extra_filters = [] - int_alias = None + path = [] + multijoin_pos = None for pos, name in enumerate(names): - if int_alias is not None: - exclusions.add(int_alias) - exclusions.add(alias) - last.append(len(joins)) if name == 'pk': name = opts.pk.name try: @@ -1315,14 +1324,12 @@ class Query(object): field, model, direct, m2m = opts.get_field_by_name(f.name) break else: - names = opts.get_all_field_names() + list(self.aggregate_select) + available = opts.get_all_field_names() + list(self.aggregate_select) raise FieldError("Cannot resolve keyword %r into field. " - "Choices are: %s" % (name, ", ".join(names))) - - if not allow_many and (m2m or not direct): - for alias in joins: - self.unref_alias(alias) - raise MultiJoin(pos + 1) + "Choices are: %s" % (name, ", ".join(available))) + # Check if we need any joins for concrete inheritance cases (the + # field lives in parent, but we are currently in one of its + # children) if model: # The field lives on a base class of the current model. 
# Skip the chain of proxy to the concrete proxied model @@ -1332,227 +1339,179 @@ class Query(object): if int_model is proxied_model: opts = int_model._meta else: - lhs_col = opts.parents[int_model].column - dedupe = lhs_col in opts.duplicate_targets - if dedupe: - exclusions.update(self.dupe_avoidance.get( - (id(opts), lhs_col), ())) - dupe_set.add((opts, lhs_col)) + final_field = opts.parents[int_model] + target = final_field.rel.get_related_field() opts = int_model._meta - alias = self.join((alias, opts.db_table, lhs_col, - opts.pk.column), exclusions=exclusions) - joins.append(alias) - exclusions.add(alias) - for (dupe_opts, dupe_col) in dupe_set: - self.update_dupe_avoidance(dupe_opts, dupe_col, - alias) - cached_data = opts._join_cache.get(name) - orig_opts = opts - dupe_col = direct and field.column or field.field.column - dedupe = dupe_col in opts.duplicate_targets - if dupe_set or dedupe: - if dedupe: - dupe_set.add((opts, dupe_col)) - exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col), - ())) - - if process_extras and hasattr(field, 'extra_filters'): - extra_filters.extend(field.extra_filters(names, pos, negate)) - if direct: - if m2m: - # Many-to-many field defined on the current model. - if cached_data: - (table1, from_col1, to_col1, table2, from_col2, - to_col2, opts, target) = cached_data - else: - table1 = field.m2m_db_table() - from_col1 = opts.get_field_by_name( - field.m2m_target_field_name())[0].column - to_col1 = field.m2m_column_name() - opts = field.rel.to._meta - table2 = opts.db_table - from_col2 = field.m2m_reverse_name() - to_col2 = opts.get_field_by_name( - field.m2m_reverse_target_field_name())[0].column - target = opts.pk - orig_opts._join_cache[name] = (table1, from_col1, - to_col1, table2, from_col2, to_col2, opts, - target) - - int_alias = self.join((alias, table1, from_col1, to_col1), - dupe_multis, exclusions, nullable=True, - reuse=can_reuse) - if int_alias == table2 and from_col2 == to_col2: - joins.append(int_alias) - alias = int_alias - else: - alias = self.join( - (int_alias, table2, from_col2, to_col2), - dupe_multis, exclusions, nullable=True, - reuse=can_reuse) - joins.extend([int_alias, alias]) - elif field.rel: - # One-to-one or many-to-one field - if cached_data: - (table, from_col, to_col, opts, target) = cached_data - else: - opts = field.rel.to._meta - target = field.rel.get_related_field() - table = opts.db_table - from_col = field.column - to_col = target.column - orig_opts._join_cache[name] = (table, from_col, to_col, - opts, target) - - alias = self.join((alias, table, from_col, to_col), - exclusions=exclusions, - nullable=self.is_nullable(field)) - joins.append(alias) + path.append(PathInfo(final_field, target, final_field.model._meta, + opts, final_field)) + # We have five different cases to solve: foreign keys, reverse + # foreign keys, m2m fields (also reverse) and non-relational + # fields. We are mostly just using the related field API to + # fetch the from and to fields. The m2m fields are handled as + # two foreign keys, first one reverse, the second one direct. + if direct and not field.rel and not m2m: + # Local non-relational field. 
+ final_field = target = field + break + elif direct and not m2m: + # Foreign Key + opts = field.rel.to._meta + target = field.rel.get_related_field() + final_field = field + from_opts = field.model._meta + path.append(PathInfo(field, target, from_opts, opts, field)) + elif not direct and not m2m: + # Revere foreign key + final_field = to_field = field.field + opts = to_field.model._meta + from_field = to_field.rel.get_related_field() + from_opts = from_field.model._meta + path.append( + PathInfo(from_field, to_field, from_opts, opts, to_field)) + if from_field.model is to_field.model: + # Recursive foreign key to self. + target = opts.get_field_by_name( + field.field.rel.field_name)[0] else: - # Non-relation fields. - target = field - break - else: - orig_field = field + target = opts.pk + elif direct and m2m: + if not field.rel.through: + # Gotcha! This is just a fake m2m field - a generic relation + # field). + from_field = opts.pk + opts = field.rel.to._meta + target = opts.get_field_by_name(field.object_id_field_name)[0] + final_field = field + # Note that we are using different field for the join_field + # than from_field or to_field. This is a hack, but we need the + # GenericRelation to generate the extra SQL. + path.append(PathInfo(from_field, target, field.model._meta, opts, + field)) + else: + # m2m field. We are travelling first to the m2m table along a + # reverse relation, then from m2m table to the target table. + from_field1 = opts.get_field_by_name( + field.m2m_target_field_name())[0] + opts = field.rel.through._meta + to_field1 = opts.get_field_by_name(field.m2m_field_name())[0] + path.append( + PathInfo(from_field1, to_field1, from_field1.model._meta, + opts, to_field1)) + final_field = from_field2 = opts.get_field_by_name( + field.m2m_reverse_field_name())[0] + opts = field.rel.to._meta + target = to_field2 = opts.get_field_by_name( + field.m2m_reverse_target_field_name())[0] + path.append( + PathInfo(from_field2, to_field2, from_field2.model._meta, + opts, from_field2)) + elif not direct and m2m: + # This one is just like above, except we are travelling the + # fields in opposite direction. field = field.field - if m2m: - # Many-to-many field defined on the target model. 
- if cached_data: - (table1, from_col1, to_col1, table2, from_col2, - to_col2, opts, target) = cached_data - else: - table1 = field.m2m_db_table() - from_col1 = opts.get_field_by_name( - field.m2m_reverse_target_field_name())[0].column - to_col1 = field.m2m_reverse_name() - opts = orig_field.opts - table2 = opts.db_table - from_col2 = field.m2m_column_name() - to_col2 = opts.get_field_by_name( - field.m2m_target_field_name())[0].column - target = opts.pk - orig_opts._join_cache[name] = (table1, from_col1, - to_col1, table2, from_col2, to_col2, opts, - target) + from_field1 = opts.get_field_by_name( + field.m2m_reverse_target_field_name())[0] + int_opts = field.rel.through._meta + to_field1 = int_opts.get_field_by_name( + field.m2m_reverse_field_name())[0] + path.append( + PathInfo(from_field1, to_field1, from_field1.model._meta, + int_opts, to_field1)) + final_field = from_field2 = int_opts.get_field_by_name( + field.m2m_field_name())[0] + opts = field.opts + target = to_field2 = opts.get_field_by_name( + field.m2m_target_field_name())[0] + path.append(PathInfo(from_field2, to_field2, from_field2.model._meta, + opts, from_field2)) - int_alias = self.join((alias, table1, from_col1, to_col1), - dupe_multis, exclusions, nullable=True, - reuse=can_reuse) - alias = self.join((int_alias, table2, from_col2, to_col2), - dupe_multis, exclusions, nullable=True, - reuse=can_reuse) - joins.extend([int_alias, alias]) - else: - # One-to-many field (ForeignKey defined on the target model) - if cached_data: - (table, from_col, to_col, opts, target) = cached_data - else: - local_field = opts.get_field_by_name( - field.rel.field_name)[0] - opts = orig_field.opts - table = opts.db_table - from_col = local_field.column - to_col = field.column - # In case of a recursive FK, use the to_field for - # reverse lookups as well - if orig_field.model is local_field.model: - target = opts.get_field_by_name( - field.rel.field_name)[0] - else: - target = opts.pk - orig_opts._join_cache[name] = (table, from_col, to_col, - opts, target) - - alias = self.join((alias, table, from_col, to_col), - dupe_multis, exclusions, nullable=True, - reuse=can_reuse) - joins.append(alias) - - for (dupe_opts, dupe_col) in dupe_set: - if int_alias is None: - to_avoid = alias - else: - to_avoid = int_alias - self.update_dupe_avoidance(dupe_opts, dupe_col, to_avoid) + if m2m and multijoin_pos is None: + multijoin_pos = pos + if not direct and not path[-1].to_field.unique and multijoin_pos is None: + multijoin_pos = pos if pos != len(names) - 1: if pos == len(names) - 2: - raise FieldError("Join on field %r not permitted. Did you misspell %r for the lookup type?" % (name, names[pos + 1])) + raise FieldError( + "Join on field %r not permitted. Did you misspell %r for " + "the lookup type?" % (name, names[pos + 1])) else: raise FieldError("Join on field %r not permitted." % name) + if multijoin_pos is not None and len(path) >= multijoin_pos and not allow_many: + raise MultiJoin(multijoin_pos + 1) + return path, final_field, target - return field, target, opts, joins, last, extra_filters - - def trim_joins(self, target, join_list, last, trim, nonnull_check=False): + def setup_joins(self, names, opts, alias, can_reuse, allow_many=True, + allow_explicit_fk=False): """ - Sometimes joins at the end of a multi-table sequence can be trimmed. 
If - the final join is against the same column as we are comparing against, - and is an inner join, we can go back one step in a join chain and - compare against the LHS of the join instead (and then repeat the - optimization). The result, potentially, involves fewer table joins. + Compute the necessary table joins for the passage through the fields + given in 'names'. 'opts' is the Options class for the current model + (which gives the table we are starting from), 'alias' is the alias for + the table to start the joining from. - The 'target' parameter is the final field being joined to, 'join_list' - is the full list of join aliases. + The 'can_reuse' defines the reverse foreign key joins we can reuse. It + can be sql.constants.REUSE_ALL in which case all joins are reusable + or a set of aliases that can be reused. Note that Non-reverse foreign + keys are always reusable. - The 'last' list contains offsets into 'join_list', corresponding to - each component of the filter. Many-to-many relations, for example, add - two tables to the join list and we want to deal with both tables the - same way, so 'last' has an entry for the first of the two tables and - then the table immediately after the second table, in that case. + If 'allow_many' is False, then any reverse foreign key seen will + generate a MultiJoin exception. - The 'trim' parameter forces the final piece of the join list to be - trimmed before anything. See the documentation of add_filter() for - details about this. + The 'allow_explicit_fk' controls if field.attname is allowed in the + lookups. - The 'nonnull_check' parameter is True when we are using inner joins - between tables explicitly to exclude NULL entries. In that case, the - tables shouldn't be trimmed, because the very action of joining to them - alters the result set. + Returns the final field involved in the joins, the target field (used + for any 'where' constraint), the final 'opts' value, the joins and the + field path travelled to generate the joins. + + The target field is the field containing the concrete value. Final + field can be something different, for example foreign key pointing to + that value. Final field is needed for example in some value + conversions (convert 'obj' in fk__id=obj to pk val using the foreign + key field for example). + """ + joins = [alias] + # First, generate the path for the names + path, final_field, target = self.names_to_path( + names, opts, allow_many, allow_explicit_fk) + # Then, add the path to the query's joins. Note that we can't trim + # joins at this stage - we will need the information about join type + # of the trimmed joins. + for pos, join in enumerate(path): + from_field, to_field, from_opts, opts, join_field = join + direct = join_field == from_field + if direct: + nullable = self.is_nullable(from_field) + else: + nullable = True + connection = alias, opts.db_table, from_field.column, to_field.column + alias = self.join(connection, reuse=can_reuse, nullable=nullable, + join_field=join_field) + joins.append(alias) + return final_field, target, opts, joins, path + + def trim_joins(self, target, joins, path): + """ + The 'target' parameter is the final field being joined to, 'joins' + is the full list of join aliases. The 'path' contain the PathInfos + used to create the joins. Returns the final active column and table alias and the new active - join_list. 
- """ - final = len(join_list) - penultimate = last.pop() - if penultimate == final: - penultimate = last.pop() - if trim and final > 1: - extra = join_list[penultimate:] - join_list = join_list[:penultimate] - final = penultimate - penultimate = last.pop() - col = self.alias_map[extra[0]].lhs_join_col - for alias in extra: - self.unref_alias(alias) - else: - col = target.column - alias = join_list[-1] - while final > 1: - join = self.alias_map[alias] - if (col != join.rhs_join_col or join.join_type != self.INNER or - nonnull_check): - break - self.unref_alias(alias) - alias = join.lhs_alias - col = join.lhs_join_col - join_list.pop() - final -= 1 - if final == penultimate: - penultimate = last.pop() - return col, alias, join_list + joins. - def update_dupe_avoidance(self, opts, col, alias): + We will always trim any direct join if we have the target column + available already in the previous table. Reverse joins can't be + trimmed as we don't know if there is anything on the other side of + the join. """ - For a column that is one of multiple pointing to the same table, update - the internal data structures to note that this alias shouldn't be used - for those other columns. - """ - ident = id(opts) - for name in opts.duplicate_targets[col]: - try: - self.dupe_avoidance[ident, name].add(alias) - except KeyError: - self.dupe_avoidance[ident, name] = set([alias]) + for info in reversed(path): + direct = info.join_field == info.from_field + if info.to_field == target and direct: + target = info.from_field + self.unref_alias(joins.pop()) + else: + break + return target.column, joins[-1], joins def split_exclude(self, filter_expr, prefix, can_reuse): """ @@ -1560,6 +1519,19 @@ class Query(object): to use a subquery. This method constructs the nested query, given the original exclude filter (filter_expr) and the portion up to the first N-to-many relation field. + + As an example we could have original filter ~Q(child__name='foo'). + We would get here with filter_expr = child_name, prefix = child and + can_reuse is a set of joins we can reuse for filtering in the original + query. + + We will turn this into + WHERE pk NOT IN (SELECT parent_id FROM thetable + WHERE name = 'foo' AND parent_id IS NOT NULL) + + It might be worth it to consider using WHERE NOT EXISTS as that has + saner null handling, and is easier for the backend's optimizer to + handle. """ query = Query(self.model) query.add_filter(filter_expr) @@ -1570,10 +1542,21 @@ class Query(object): # since we are adding a IN clause. This prevents the # database from tripping over IN (...,NULL,...) selects and returning # nothing - alias, col = query.select[0] + alias, col = query.select[0].col query.where.add((Constraint(alias, col, None), 'isnull', False), AND) + # We need to trim the last part from the prefix. + trimmed_prefix = LOOKUP_SEP.join(prefix.split(LOOKUP_SEP)[0:-1]) + if not trimmed_prefix: + rel, _, direct, m2m = self.model._meta.get_field_by_name(prefix) + if not m2m: + trimmed_prefix = rel.field.rel.field_name + else: + if direct: + trimmed_prefix = rel.m2m_target_field_name() + else: + trimmed_prefix = rel.field.m2m_reverse_target_field_name() - self.add_filter(('%s__in' % prefix, query), negate=True, trim=True, + self.add_filter(('%s__in' % trimmed_prefix, query), negate=True, can_reuse=can_reuse) # If there's more than one join in the inner query (before any initial @@ -1583,11 +1566,11 @@ class Query(object): # comparison to NULL (e.g. 
in # Tag.objects.exclude(parent__parent__name='t1'), a tag with no parent # would otherwise be overlooked). - active_positions = [pos for (pos, count) in - enumerate(six.itervalues(query.alias_refcount)) if count] - if active_positions[-1] > 1: - self.add_filter(('%s__isnull' % prefix, False), negate=True, - trim=True, can_reuse=can_reuse) + active_positions = len([count for count + in query.alias_refcount.items() if count]) + if active_positions > 1: + self.add_filter(('%s__isnull' % trimmed_prefix, False), negate=True, + can_reuse=can_reuse) def set_limits(self, low=None, high=None): """ @@ -1629,7 +1612,6 @@ class Query(object): Removes all fields from SELECT clause. """ self.select = [] - self.select_fields = [] self.default_cols = False self.select_related = False self.set_extra_mask(()) @@ -1642,7 +1624,6 @@ class Query(object): columns. """ self.select = [] - self.select_fields = [] def add_distinct_fields(self, *field_names): """ @@ -1661,8 +1642,8 @@ class Query(object): try: for name in field_names: - field, target, u2, joins, u3, u4 = self.setup_joins( - name.split(LOOKUP_SEP), opts, alias, False, allow_m2m, + field, target, u2, joins, u3 = self.setup_joins( + name.split(LOOKUP_SEP), opts, alias, REUSE_ALL, allow_m2m, True) final_alias = joins[-1] col = target.column @@ -1674,8 +1655,7 @@ class Query(object): col = join.lhs_join_col joins = joins[:-1] self.promote_joins(joins[1:]) - self.select.append((final_alias, col)) - self.select_fields.append(field) + self.select.append(SelectInfo((final_alias, col), field)) except MultiJoin: raise FieldError("Invalid field name: '%s'" % name) except FieldError: @@ -1731,8 +1711,8 @@ class Query(object): """ self.group_by = [] - for sel in self.select: - self.group_by.append(sel) + for col, _ in self.select: + self.group_by.append(col) def add_count_column(self): """ @@ -1745,7 +1725,7 @@ class Query(object): else: assert len(self.select) == 1, \ "Cannot add count col with multiple cols in 'select': %r" % self.select - count = self.aggregates_module.Count(self.select[0]) + count = self.aggregates_module.Count(self.select[0].col) else: opts = self.model._meta if not self.select: @@ -1757,7 +1737,7 @@ class Query(object): assert len(self.select) == 1, \ "Cannot add count col with multiple cols in 'select'." - count = self.aggregates_module.Count(self.select[0], distinct=True) + count = self.aggregates_module.Count(self.select[0].col, distinct=True) # Distinct handling is done in Count(), so don't do it at this # level. 
self.distinct = False @@ -1781,7 +1761,6 @@ class Query(object): d = d.setdefault(part, {}) self.select_related = field_dict self.related_select_cols = [] - self.related_select_fields = [] def add_extra(self, select, select_params, where, params, tables, order_by): """ @@ -1954,8 +1933,8 @@ class Query(object): """ opts = self.model._meta alias = self.get_initial_alias() - field, col, opts, joins, last, extra = self.setup_joins( - start.split(LOOKUP_SEP), opts, alias, False) + field, col, opts, joins, extra = self.setup_joins( + start.split(LOOKUP_SEP), opts, alias, REUSE_ALL) select_col = self.alias_map[joins[1]].lhs_join_col select_alias = alias @@ -1975,7 +1954,7 @@ class Query(object): self.unref_alias(select_alias) select_alias = join_info.rhs_alias select_col = join_info.rhs_join_col - self.select = [(select_alias, select_col)] + self.select = [SelectInfo((select_alias, select_col), None)] self.remove_inherited_models() def is_nullable(self, field): @@ -2011,18 +1990,6 @@ def get_order_dir(field, default='ASC'): return field, dirn[0] -def setup_join_cache(sender, **kwargs): - """ - The information needed to join between model fields is something that is - invariant over the life of the model, so we cache it in the model's Options - class, rather than recomputing it all the time. - - This method initialises the (empty) cache when the model is created. - """ - sender._meta._join_cache = {} - -signals.class_prepared.connect(setup_join_cache) - def add_to_dict(data, key, value): """ A helper function to add "value" to the set of values for "key", whether or @@ -2032,3 +1999,10 @@ def add_to_dict(data, key, value): data[key].add(value) else: data[key] = set([value]) + +def is_reverse_o2o(field): + """ + A little helper to check if the given field is reverse-o2o. The field is + expected to be some sort of relation field or related object. + """ + return not hasattr(field, 'rel') and field.field.unique diff --git a/django/db/models/sql/subqueries.py b/django/db/models/sql/subqueries.py index 24ac957cbf..39d1ee0116 100644 --- a/django/db/models/sql/subqueries.py +++ b/django/db/models/sql/subqueries.py @@ -76,7 +76,7 @@ class DeleteQuery(Query): return else: innerq.clear_select_clause() - innerq.select, innerq.select_fields = [(self.get_initial_alias(), pk.column)], [None] + innerq.select = [SelectInfo((self.get_initial_alias(), pk.column), None)] values = innerq where = self.where_class() where.add((Constraint(None, pk.column, pk), 'in', values), AND) @@ -244,7 +244,7 @@ class DateQuery(Query): alias = result[3][-1] select = Date((alias, field.column), lookup_type) self.clear_select_clause() - self.select, self.select_fields = [select], [None] + self.select = [SelectInfo(select, None)] self.distinct = True self.order_by = order == 'ASC' and [1] or [-1] diff --git a/django/db/utils.py b/django/db/utils.py index 5fa78fe350..842fd354d6 100644 --- a/django/db/utils.py +++ b/django/db/utils.py @@ -5,6 +5,7 @@ from threading import local from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module +from django.utils._os import upath from django.utils import six @@ -27,7 +28,7 @@ def load_backend(backend_name): except ImportError as e_user: # The database backend wasn't found. Display a helpful error message # listing all possible (built-in) database backends. 
- backend_dir = os.path.join(os.path.dirname(__file__), 'backends') + backend_dir = os.path.join(os.path.dirname(upath(__file__)), 'backends') try: builtin_backends = [ name for _, name, ispkg in pkgutil.iter_modules([backend_dir]) @@ -53,7 +54,14 @@ class ConnectionDoesNotExist(Exception): class ConnectionHandler(object): def __init__(self, databases): - self.databases = databases + if not databases: + self.databases = { + DEFAULT_DB_ALIAS: { + 'ENGINE': 'django.db.backends.dummy', + }, + } + else: + self.databases = databases self._connections = local() def ensure_defaults(self, alias): diff --git a/django/dispatch/dispatcher.py b/django/dispatch/dispatcher.py index 8d26e58bf4..65c5c408ff 100644 --- a/django/dispatch/dispatcher.py +++ b/django/dispatch/dispatcher.py @@ -10,6 +10,10 @@ def _make_id(target): if hasattr(target, '__func__'): return (id(target.__self__), id(target.__func__)) return id(target) +NONE_ID = _make_id(None) + +# A marker for caching +NO_RECEIVERS = object() class Signal(object): """ @@ -20,8 +24,7 @@ class Signal(object): receivers { receriverkey (id) : weakref(receiver) } """ - - def __init__(self, providing_args=None): + def __init__(self, providing_args=None, use_caching=False): """ Create a new signal. @@ -33,6 +36,13 @@ class Signal(object): providing_args = [] self.providing_args = set(providing_args) self.lock = threading.Lock() + self.use_caching = use_caching + # For convenience we create empty caches even if they are not used. + # A note about caching: if use_caching is defined, then for each + # distinct sender we cache the receivers that sender has in + # 'sender_receivers_cache'. The cache is cleaned when .connect() or + # .disconnect() is called and populated on send(). + self.sender_receivers_cache = {} def connect(self, receiver, sender=None, weak=True, dispatch_uid=None): """ @@ -106,6 +116,7 @@ class Signal(object): break else: self.receivers.append((lookup_key, receiver)) + self.sender_receivers_cache = {} def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None): """ @@ -140,9 +151,10 @@ class Signal(object): if r_key == lookup_key: del self.receivers[index] break + self.sender_receivers_cache = {} def has_listeners(self, sender=None): - return bool(self._live_receivers(_make_id(sender))) + return bool(self._live_receivers(sender)) def send(self, sender, **named): """ @@ -163,10 +175,10 @@ class Signal(object): Returns a list of tuple pairs [(receiver, response), ... ]. """ responses = [] - if not self.receivers: + if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS: return responses - for receiver in self._live_receivers(_make_id(sender)): + for receiver in self._live_receivers(sender): response = receiver(signal=self, sender=sender, **named) responses.append((receiver, response)) return responses @@ -195,12 +207,12 @@ class Signal(object): receiver. """ responses = [] - if not self.receivers: + if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS: return responses # Call each receiver with whatever arguments it can accept. # Return a list of tuple pairs [(receiver, response), ... ]. 
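A small usage sketch of the opt-in receiver cache added above; the signal, sender class and receiver are invented for illustration:

from django.dispatch import Signal

pizza_done = Signal(providing_args=['size'], use_caching=True)

class Pizzeria(object):
    pass

def crust_check(sender, **kwargs):
    return kwargs.get('size')

pizza_done.connect(crust_check, sender=Pizzeria)  # connect()/disconnect() flush the cache
pizza_done.send(sender=Pizzeria, size=42)   # first send() populates sender_receivers_cache
pizza_done.send(sender=Pizzeria, size=12)   # receiver list now comes from the cache
pizza_done.send(sender=object)              # senders with no receivers get the NO_RECEIVERS
                                            # marker, so later sends return [] without scanning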
- for receiver in self._live_receivers(_make_id(sender)): + for receiver in self._live_receivers(sender): try: response = receiver(signal=self, sender=sender, **named) except Exception as err: @@ -209,26 +221,43 @@ class Signal(object): responses.append((receiver, response)) return responses - def _live_receivers(self, senderkey): + def _live_receivers(self, sender): """ Filter sequence of receivers to get resolved, live receivers. This checks for weak references and resolves them, then returning only live receivers. """ - none_senderkey = _make_id(None) - receivers = [] - - for (receiverkey, r_senderkey), receiver in self.receivers: - if r_senderkey == none_senderkey or r_senderkey == senderkey: - if isinstance(receiver, WEAKREF_TYPES): - # Dereference the weak reference. - receiver = receiver() - if receiver is not None: + receivers = None + if self.use_caching: + receivers = self.sender_receivers_cache.get(sender) + # We could end up here with NO_RECEIVERS even if we do check this case in + # .send() prior to calling _live_receivers() due to concurrent .send() call. + if receivers is NO_RECEIVERS: + return [] + if receivers is None: + with self.lock: + senderkey = _make_id(sender) + receivers = [] + for (receiverkey, r_senderkey), receiver in self.receivers: + if r_senderkey == NONE_ID or r_senderkey == senderkey: receivers.append(receiver) - else: - receivers.append(receiver) - return receivers + if self.use_caching: + if not receivers: + self.sender_receivers_cache[sender] = NO_RECEIVERS + else: + # Note, we must cache the weakref versions. + self.sender_receivers_cache[sender] = receivers + non_weak_receivers = [] + for receiver in receivers: + if isinstance(receiver, WEAKREF_TYPES): + # Dereference the weak reference. + receiver = receiver() + if receiver is not None: + non_weak_receivers.append(receiver) + else: + non_weak_receivers.append(receiver) + return non_weak_receivers def _remove_receiver(self, receiver): """ @@ -246,8 +275,8 @@ class Signal(object): # after we delete some items for idx, (r_key, _) in enumerate(reversed(self.receivers)): if r_key == key: - del self.receivers[last_idx-idx] - + del self.receivers[last_idx - idx] + self.sender_receivers_cache = {} def receiver(signal, **kwargs): """ diff --git a/django/dispatch/saferef.py b/django/dispatch/saferef.py index 84d1b2183c..7423669c96 100644 --- a/django/dispatch/saferef.py +++ b/django/dispatch/saferef.py @@ -67,9 +67,9 @@ class BoundMethodWeakref(object): same BoundMethodWeakref instance. 
""" - + _allInstances = weakref.WeakValueDictionary() - + def __new__( cls, target, onDelete=None, *arguments,**named ): """Create new instance or return current instance @@ -92,7 +92,7 @@ class BoundMethodWeakref(object): cls._allInstances[key] = base base.__init__( target, onDelete, *arguments,**named) return base - + def __init__(self, target, onDelete=None): """Return a weak-reference-like instance for a bound method @@ -132,7 +132,7 @@ class BoundMethodWeakref(object): self.weakFunc = weakref.ref(target.__func__, remove) self.selfName = str(target.__self__) self.funcName = str(target.__func__.__name__) - + def calculateKey( cls, target ): """Calculate the reference key for this reference @@ -141,7 +141,7 @@ class BoundMethodWeakref(object): """ return (id(target.__self__),id(target.__func__)) calculateKey = classmethod( calculateKey ) - + def __str__(self): """Give a friendly representation of the object""" return """%s( %s.%s )"""%( @@ -157,14 +157,16 @@ class BoundMethodWeakref(object): def __bool__( self ): """Whether we are still a valid reference""" return self() is not None - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) def __eq__(self, other): """Compare with another reference""" if not isinstance(other, self.__class__): return self.__class__ == type(other) return self.key == other.key - + def __call__(self): """Return a strong reference to the bound method diff --git a/django/forms/formsets.py b/django/forms/formsets.py index c646eed506..3893cc54ba 100644 --- a/django/forms/formsets.py +++ b/django/forms/formsets.py @@ -69,7 +69,9 @@ class BaseFormSet(object): def __bool__(self): """All formsets have a management form which is not included in the length""" return True - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) @property def management_form(self): @@ -265,7 +267,7 @@ class BaseFormSet(object): def is_valid(self): """ - Returns True if form.errors is empty for every form in self.forms. + Returns True if every form in self.forms is valid. """ if not self.is_bound: return False @@ -280,8 +282,7 @@ class BaseFormSet(object): # This form is going to be deleted so any of its errors # should not cause the entire formset to be invalid. continue - if bool(self.errors[i]): - forms_valid = False + forms_valid &= form.is_valid() return forms_valid and not bool(self.non_form_errors()) def full_clean(self): diff --git a/django/forms/models.py b/django/forms/models.py index 11fe0c09ea..e9b71ccf26 100644 --- a/django/forms/models.py +++ b/django/forms/models.py @@ -126,7 +126,7 @@ def model_to_dict(instance, fields=None, exclude=None): data[f.name] = [] else: # MultipleChoiceWidget needs a list of pks, not object instances. 
- data[f.name] = [obj.pk for obj in f.value_from_object(instance)] + data[f.name] = list(f.value_from_object(instance).values_list('pk', flat=True)) else: data[f.name] = f.value_from_object(instance) return data diff --git a/django/forms/widgets.py b/django/forms/widgets.py index 763da0cff2..c761ea857d 100644 --- a/django/forms/widgets.py +++ b/django/forms/widgets.py @@ -403,7 +403,7 @@ class Textarea(Widget): def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) - return format_html('{1}', + return format_html('\r\n{1}', flatatt(final_attrs), force_text(value)) @@ -528,7 +528,7 @@ class CheckboxInput(Widget): values = {'true': True, 'false': False} if isinstance(value, six.string_types): value = values.get(value.lower(), value) - return value + return bool(value) def _has_changed(self, initial, data): # Sometimes data or initial could be None or '' which should be the diff --git a/django/http/multipartparser.py b/django/http/multipartparser.py index 40aefd6e9d..edf98f6e49 100644 --- a/django/http/multipartparser.py +++ b/django/http/multipartparser.py @@ -110,7 +110,7 @@ class MultiPartParser(object): # HTTP spec says that Content-Length >= 0 is valid # handling content-length == 0 before continuing if self._content_length == 0: - return QueryDict(MultiValueDict(), encoding=self._encoding), MultiValueDict() + return QueryDict('', encoding=self._encoding), MultiValueDict() # See if the handler will want to take care of the parsing. # This allows overriding everything if somebody wants it. @@ -199,6 +199,12 @@ class MultiPartParser(object): for chunk in field_stream: if transfer_encoding == 'base64': # We only special-case base64 transfer encoding + # We should always read base64 streams by multiple of 4 + over_bytes = len(chunk) % 4 + if over_bytes: + over_chunk = field_stream.read(4 - over_bytes) + chunk += over_chunk + try: chunk = base64.b64decode(chunk) except Exception as e: @@ -256,7 +262,7 @@ class MultiPartParser(object): """Cleanup filename from Internet Explorer full paths.""" return filename and filename[filename.rfind("\\")+1:].strip() -class LazyStream(object): +class LazyStream(six.Iterator): """ The LazyStream wrapper allows one to get and "unget" bytes from a stream. @@ -323,8 +329,6 @@ class LazyStream(object): self.position += len(output) return output - next = __next__ # Python 2 compatibility - def close(self): """ Used to invalidate/disable this lazy stream. @@ -369,7 +373,7 @@ class LazyStream(object): " if there is none, report this to the Django developers." ) -class ChunkIter(object): +class ChunkIter(six.Iterator): """ An iterable that will yield chunks of data. Given a file-like object as the constructor, this object will yield chunks of read operations from that @@ -389,12 +393,10 @@ class ChunkIter(object): else: raise StopIteration() - next = __next__ # Python 2 compatibility - def __iter__(self): return self -class InterBoundaryIter(object): +class InterBoundaryIter(six.Iterator): """ A Producer that will iterate over boundaries. """ @@ -411,9 +413,7 @@ class InterBoundaryIter(object): except InputStreamExhausted: raise StopIteration() - next = __next__ # Python 2 compatibility - -class BoundaryIter(object): +class BoundaryIter(six.Iterator): """ A Producer that is sensitive to boundaries. 
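Why the multipart parser above now tops each base64 chunk up to a multiple of four characters before decoding, shown with a standalone snippet rather than code from the patch:

import base64

payload = base64.b64encode(b'some longer binary payload')   # 36 chars encoding 26 bytes
try:
    # 10 is not a multiple of 4, so the final group is broken and decoding fails.
    base64.b64decode(payload[:10])
except Exception:
    pass
# Reading 4 - (10 % 4) = 2 extra characters, which is what the over_bytes
# handling does per chunk, keeps the 4-character groups intact:
base64.b64decode(payload[:12])       # cleanly decodes the first 9 bytes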
@@ -489,8 +489,6 @@ class BoundaryIter(object): stream.unget(chunk[-rollback:]) return chunk[:-rollback] - next = __next__ # Python 2 compatibility - def _find_boundary(self, data, eof = False): """ Finds a multipart boundary in data. diff --git a/django/http/request.py b/django/http/request.py index 96c7606c86..8f74bddb71 100644 --- a/django/http/request.py +++ b/django/http/request.py @@ -25,6 +25,7 @@ from django.utils.encoding import force_bytes, force_text, force_str, iri_to_uri RAISE_ERROR = object() absolute_http_url_re = re.compile(r"^https?://", re.I) +host_validation_re = re.compile(r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$") class UnreadablePostError(IOError): @@ -64,7 +65,7 @@ class HttpRequest(object): host = '%s:%s' % (host, server_port) # Disallow potentially poisoned hostnames. - if set(';/?@&=+$,').intersection(host): + if not host_validation_re.match(host.lower()): raise SuspiciousOperation('Invalid HTTP_HOST header: %s' % host) return host @@ -276,6 +277,9 @@ class QueryDict(MultiValueDict): encoding = settings.DEFAULT_CHARSET self.encoding = encoding if six.PY3: + if isinstance(query_string, bytes): + # query_string contains URL-encoded data, a subset of ASCII. + query_string = query_string.decode() for key, value in parse_qsl(query_string or '', keep_blank_values=True, encoding=encoding): diff --git a/django/http/response.py b/django/http/response.py index 56e3d00096..df0a955b18 100644 --- a/django/http/response.py +++ b/django/http/response.py @@ -23,7 +23,7 @@ class BadHeaderError(ValueError): pass -class HttpResponseBase(object): +class HttpResponseBase(six.Iterator): """ An HTTP response base class with dictionary-accessed headers. @@ -218,8 +218,6 @@ class HttpResponseBase(object): # Subclasses must define self._iterator for this function. return self.make_bytes(next(self._iterator)) - next = __next__ # Python 2 compatibility - # These methods partially implement the file-like object interface. # See http://docs.python.org/lib/bltin-file-objects.html diff --git a/django/middleware/cache.py b/django/middleware/cache.py index 34bf0ca4a4..de2b86f630 100644 --- a/django/middleware/cache.py +++ b/django/middleware/cache.py @@ -126,7 +126,6 @@ class FetchFromCacheMiddleware(object): def __init__(self): self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX - self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False) self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS self.cache = get_cache(self.cache_alias) diff --git a/django/middleware/common.py b/django/middleware/common.py index 6fbbf43044..ccc9fbfaad 100644 --- a/django/middleware/common.py +++ b/django/middleware/common.py @@ -6,6 +6,7 @@ from django.conf import settings from django import http from django.core.mail import mail_managers from django.utils.http import urlquote +from django.utils import six from django.core import urlresolvers @@ -87,7 +88,17 @@ class CommonMiddleware(object): else: newurl = urlquote(new_url[1]) if request.META.get('QUERY_STRING', ''): - newurl += '?' + request.META['QUERY_STRING'] + if six.PY3: + newurl += '?' + request.META['QUERY_STRING'] + else: + # `query_string` is a bytestring. Appending it to the unicode + # string `newurl` will fail if it isn't ASCII-only. This isn't + # allowed; only broken software generates such query strings. + # Better drop the invalid query string than crash (#15152). + try: + newurl += '?' 
+ request.META['QUERY_STRING'].decode() + except UnicodeDecodeError: + pass return http.HttpResponsePermanentRedirect(newurl) def process_response(self, request, response): diff --git a/django/shortcuts/__init__.py b/django/shortcuts/__init__.py index a824446b7e..9f896347a4 100644 --- a/django/shortcuts/__init__.py +++ b/django/shortcuts/__init__.py @@ -7,6 +7,7 @@ for convenience's sake. from django.template import loader, RequestContext from django.http import HttpResponse, Http404 from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect +from django.db.models.base import ModelBase from django.db.models.manager import Manager from django.db.models.query import QuerySet from django.core import urlresolvers @@ -46,7 +47,7 @@ def render(request, *args, **kwargs): def redirect(to, *args, **kwargs): """ - Returns an HttpResponseRedirect to the apropriate URL for the arguments + Returns an HttpResponseRedirect to the appropriate URL for the arguments passed. The arguments could be: @@ -72,13 +73,20 @@ def _get_queryset(klass): """ Returns a QuerySet from a Model, Manager, or QuerySet. Created to make get_object_or_404 and get_list_or_404 more DRY. + + Raises a ValueError if klass is not a Model, Manager, or QuerySet. """ if isinstance(klass, QuerySet): return klass elif isinstance(klass, Manager): manager = klass - else: + elif isinstance(klass, ModelBase): manager = klass._default_manager + else: + klass__name = klass.__name__ if isinstance(klass, type) \ + else klass.__class__.__name__ + raise ValueError("Object is of type '%s', but must be a Django Model, " + "Manager, or QuerySet" % klass__name) return manager.all() def get_object_or_404(klass, *args, **kwargs): diff --git a/django/template/defaultfilters.py b/django/template/defaultfilters.py index e15440f90e..dac4e5ddb4 100644 --- a/django/template/defaultfilters.py +++ b/django/template/defaultfilters.py @@ -704,7 +704,7 @@ def get_digit(value, arg): @register.filter(expects_localtime=True, is_safe=False) def date(value, arg=None): """Formats a date according to the given format.""" - if not value: + if value in (None, ''): return '' if arg is None: arg = settings.DATE_FORMAT diff --git a/django/template/defaulttags.py b/django/template/defaulttags.py index fa2e840cbf..aca2f41f2d 100644 --- a/django/template/defaulttags.py +++ b/django/template/defaulttags.py @@ -398,10 +398,14 @@ class URLNode(Node): view_name = self.view_name.resolve(context) + if not view_name: + raise NoReverseMatch("'url' requires a non-empty first argument. " + "The syntax changed in Django 1.5, see the docs.") + # Try to look up the URL twice: once given the view name, and again # relative to what we guess is the "main" app. If they both fail, # re-raise the NoReverseMatch unless we're using the - # {% url ... as var %} construct in which cause return nothing. + # {% url ... as var %} construct in which case return nothing. url = '' try: url = reverse(view_name, args=args, kwargs=kwargs, current_app=context.current_app) @@ -1258,7 +1262,12 @@ def url(parser, token): if len(bits) < 2: raise TemplateSyntaxError("'%s' takes at least one argument" " (path to a view)" % bits[0]) - viewname = parser.compile_filter(bits[1]) + try: + viewname = parser.compile_filter(bits[1]) + except TemplateSyntaxError as exc: + exc.args = (exc.args[0] + ". 
" + "The syntax of 'url' changed in Django 1.5, see the docs."), + raise args = [] kwargs = {} asvar = None diff --git a/django/template/response.py b/django/template/response.py index 2cb44d127d..3b3b41331a 100644 --- a/django/template/response.py +++ b/django/template/response.py @@ -40,7 +40,7 @@ class SimpleTemplateResponse(HttpResponse): rendered, and that the pickled state only includes rendered data, not the data used to construct the response. """ - obj_dict = self.__dict__.copy() + obj_dict = super(SimpleTemplateResponse, self).__getstate__() if not self._is_rendered: raise ContentNotRenderedError('The response content must be ' 'rendered before it can be pickled.') diff --git a/django/test/signals.py b/django/test/signals.py index d140304f1d..a96bdff3b3 100644 --- a/django/test/signals.py +++ b/django/test/signals.py @@ -5,6 +5,7 @@ from django.conf import settings from django.db import connections from django.dispatch import receiver, Signal from django.utils import timezone +from django.utils.functional import empty template_rendered = Signal(providing_args=["template", "context"]) @@ -72,3 +73,9 @@ def language_changed(**kwargs): trans_real._default = None if kwargs['setting'] == 'LOCALE_PATHS': trans_real._translations = {} + +@receiver(setting_changed) +def file_storage_changed(**kwargs): + if kwargs['setting'] in ('MEDIA_ROOT', 'DEFAULT_FILE_STORAGE'): + from django.core.files.storage import default_storage + default_storage._wrapped = empty diff --git a/django/test/testcases.py b/django/test/testcases.py index 1239275264..3af6b8c346 100644 --- a/django/test/testcases.py +++ b/django/test/testcases.py @@ -241,6 +241,40 @@ class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext): class SimpleTestCase(ut2.TestCase): + def __call__(self, result=None): + """ + Wrapper around default __call__ method to perform common Django test + set up. This means that user-defined Test Cases aren't required to + include a call to super().setUp(). + """ + testMethod = getattr(self, self._testMethodName) + skipped = (getattr(self.__class__, "__unittest_skip__", False) or + getattr(testMethod, "__unittest_skip__", False)) + + if not skipped: + try: + self._pre_setup() + except (KeyboardInterrupt, SystemExit): + raise + except Exception: + result.addError(self, sys.exc_info()) + return + super(SimpleTestCase, self).__call__(result) + if not skipped: + try: + self._post_teardown() + except (KeyboardInterrupt, SystemExit): + raise + except Exception: + result.addError(self, sys.exc_info()) + return + + def _pre_setup(self): + pass + + def _post_teardown(self): + pass + def save_warnings_state(self): """ Saves the state of the warnings module @@ -412,10 +446,20 @@ class TransactionTestCase(SimpleTestCase): ROOT_URLCONF with it. * Clearing the mail test outbox. """ + self.client = self.client_class() self._fixture_setup() self._urlconf_setup() mail.outbox = [] + def _databases_names(self, include_mirrors=True): + # If the test case has a multi_db=True flag, act on all databases, + # including mirrors or not. Otherwise, just on the default DB. 
+ if getattr(self, 'multi_db', False): + return [alias for alias in connections + if include_mirrors or not connections[alias].settings_dict['TEST_MIRROR']] + else: + return [DEFAULT_DB_ALIAS] + def _reset_sequences(self, db_name): conn = connections[db_name] if conn.features.supports_sequence_reset: @@ -433,10 +477,7 @@ class TransactionTestCase(SimpleTestCase): transaction.commit_unless_managed(using=db_name) def _fixture_setup(self): - # If the test case has a multi_db=True flag, act on all databases. - # Otherwise, just on the default DB. - db_names = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS] - for db_name in db_names: + for db_name in self._databases_names(include_mirrors=False): # Reset sequences if self.reset_sequences: self._reset_sequences(db_name) @@ -453,35 +494,6 @@ class TransactionTestCase(SimpleTestCase): settings.ROOT_URLCONF = self.urls clear_url_caches() - def __call__(self, result=None): - """ - Wrapper around default __call__ method to perform common Django test - set up. This means that user-defined Test Cases aren't required to - include a call to super().setUp(). - """ - testMethod = getattr(self, self._testMethodName) - skipped = (getattr(self.__class__, "__unittest_skip__", False) or - getattr(testMethod, "__unittest_skip__", False)) - - if not skipped: - self.client = self.client_class() - try: - self._pre_setup() - except (KeyboardInterrupt, SystemExit): - raise - except Exception: - result.addError(self, sys.exc_info()) - return - super(TransactionTestCase, self).__call__(result) - if not skipped: - try: - self._post_teardown() - except (KeyboardInterrupt, SystemExit): - raise - except Exception: - result.addError(self, sys.exc_info()) - return - def _post_teardown(self): """ Performs any post-test things. This includes: @@ -502,10 +514,12 @@ class TransactionTestCase(SimpleTestCase): conn.close() def _fixture_teardown(self): - # If the test case has a multi_db=True flag, flush all databases. - # Otherwise, just flush default. - databases = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS] - for db in databases: + # Roll back any pending transactions in order to avoid a deadlock + # during flush when TEST_MIRROR is used (#18984). + for conn in connections.all(): + conn.rollback_unless_managed() + + for db in self._databases_names(include_mirrors=False): call_command('flush', verbosity=0, interactive=False, database=db, skip_validation=True, reset_sequences=False) @@ -753,6 +767,12 @@ class TransactionTestCase(SimpleTestCase): items = six.moves.map(transform, qs) if not ordered: return self.assertEqual(set(items), set(values)) + values = list(values) + # For example qs.iterator() could be passed as qs, but it does not + # have 'ordered' attribute. + if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered: + raise ValueError("Trying to compare non-ordered queryset " + "against more than one ordered values") return self.assertEqual(list(items), values) def assertNumQueries(self, num, func=None, *args, **kwargs): @@ -790,11 +810,7 @@ class TestCase(TransactionTestCase): assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances' - # If the test case has a multi_db=True flag, setup all databases. - # Otherwise, just use default. 
- db_names = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS] - - for db_name in db_names: + for db_name in self._databases_names(): transaction.enter_transaction_management(using=db_name) transaction.managed(True, using=db_name) disable_transaction_methods() @@ -802,7 +818,7 @@ class TestCase(TransactionTestCase): from django.contrib.sites.models import Site Site.objects.clear_cache() - for db in db_names: + for db in self._databases_names(include_mirrors=False): if hasattr(self, 'fixtures'): call_command('loaddata', *self.fixtures, **{ @@ -816,15 +832,8 @@ class TestCase(TransactionTestCase): if not connections_support_transactions(): return super(TestCase, self)._fixture_teardown() - # If the test case has a multi_db=True flag, teardown all databases. - # Otherwise, just teardown default. - if getattr(self, 'multi_db', False): - databases = connections - else: - databases = [DEFAULT_DB_ALIAS] - restore_transaction_methods() - for db in databases: + for db in self._databases_names(): transaction.rollback(using=db) transaction.leave_transaction_management(using=db) @@ -1025,6 +1034,7 @@ class LiveServerThread(threading.Thread): (self.host, port), QuietWSGIRequestHandler) except WSGIServerException as e: if (index + 1 < len(self.possible_ports) and + hasattr(e.args[0], 'errno') and e.args[0].errno == errno.EADDRINUSE): # This port is already in use, so we go on and try with # the next one in the list. @@ -1077,7 +1087,7 @@ class LiveServerTestCase(TransactionTestCase): for conn in connections.all(): # If using in-memory sqlite databases, pass the connections to # the server thread. - if (conn.settings_dict['ENGINE'] == 'django.db.backends.sqlite3' + if (conn.settings_dict['ENGINE'].rsplit('.', 1)[-1] in ('sqlite3', 'spatialite') and conn.settings_dict['NAME'] == ':memory:'): # Explicitly enable thread-shareability for this connection conn.allow_thread_sharing = True @@ -1129,7 +1139,7 @@ class LiveServerTestCase(TransactionTestCase): # Restore sqlite connections' non-sharability for conn in connections.all(): - if (conn.settings_dict['ENGINE'] == 'django.db.backends.sqlite3' + if (conn.settings_dict['ENGINE'].rsplit('.', 1)[-1] in ('sqlite3', 'spatialite') and conn.settings_dict['NAME'] == ':memory:'): conn.allow_thread_sharing = False diff --git a/django/test/utils.py b/django/test/utils.py index f10d388227..8114ae0e6a 100644 --- a/django/test/utils.py +++ b/django/test/utils.py @@ -187,8 +187,12 @@ class override_settings(object): self.disable() def __call__(self, test_func): - from django.test import TransactionTestCase - if isinstance(test_func, type) and issubclass(test_func, TransactionTestCase): + from django.test import SimpleTestCase + if isinstance(test_func, type): + if not issubclass(test_func, SimpleTestCase): + raise Exception( + "Only subclasses of Django SimpleTestCase can be decorated " + "with override_settings") original_pre_setup = test_func._pre_setup original_post_teardown = test_func._post_teardown diff --git a/django/utils/_os.py b/django/utils/_os.py index 1ea12aed8a..6c1cd17a83 100644 --- a/django/utils/_os.py +++ b/django/utils/_os.py @@ -1,6 +1,8 @@ import os import stat +import sys from os.path import join, normcase, normpath, abspath, isabs, sep, dirname + from django.utils.encoding import force_text from django.utils import six @@ -10,6 +12,9 @@ except NameError: class WindowsError(Exception): pass +if not six.PY3: + fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() + # Under Python 2, define our own abspath function 
that can handle joining # unicode paths to a current working directory that has non-ASCII characters @@ -29,6 +34,23 @@ else: path = join(os.getcwdu(), path) return normpath(path) +def upath(path): + """ + Always return a unicode path. + """ + if not six.PY3: + return path.decode(fs_encoding) + return path + +def npath(path): + """ + Always return a native path, that is unicode on Python 3 and bytestring on + Python 2. + """ + if not six.PY3 and not isinstance(path, bytes): + return path.encode(fs_encoding) + return path + def safe_join(base, *paths): """ Joins one or more path components to the base path component intelligently. @@ -74,4 +96,3 @@ def rmtree_errorhandler(func, path, exc_info): os.chmod(path, stat.S_IWRITE) # use the original function to repeat the operation func(path) - diff --git a/django/utils/autoreload.py b/django/utils/autoreload.py index 2daafedd85..617fc9da7d 100644 --- a/django/utils/autoreload.py +++ b/django/utils/autoreload.py @@ -28,7 +28,7 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import os, sys, time, signal +import os, sys, time, signal, traceback try: from django.utils.six.moves import _thread as thread @@ -52,10 +52,17 @@ RUN_RELOADER = True _mtimes = {} _win = (sys.platform == "win32") +_error_files = [] + def code_changed(): global _mtimes, _win - filenames = [getattr(m, "__file__", None) for m in sys.modules.values()] - for filename in filter(None, filenames): + filenames = [] + for m in sys.modules.values(): + try: + filenames.append(m.__file__) + except AttributeError: + pass + for filename in filenames + _error_files: if filename.endswith(".pyc") or filename.endswith(".pyo"): filename = filename[:-1] if filename.endswith("$py.class"): @@ -71,9 +78,34 @@ def code_changed(): continue if mtime != _mtimes[filename]: _mtimes = {} + try: + del _error_files[_error_files.index(filename)] + except ValueError: + pass return True return False +def check_errors(fn): + def wrapper(*args, **kwargs): + try: + fn(*args, **kwargs) + except (ImportError, IndentationError, NameError, SyntaxError, + TypeError, AttributeError): + et, ev, tb = sys.exc_info() + + if getattr(ev, 'filename', None) is None: + # get the filename from the last item in the stack + filename = traceback.extract_tb(tb)[-1][0] + else: + filename = ev.filename + + if filename not in _error_files: + _error_files.append(filename) + + raise + + return wrapper + def ensure_echo_on(): if termios: fd = sys.stdin @@ -142,5 +174,7 @@ def main(main_func, args=None, kwargs=None): reloader = jython_reloader else: reloader = python_reloader - reloader(main_func, args, kwargs) + + wrapped_main_func = check_errors(main_func) + reloader(wrapped_main_func, args, kwargs) diff --git a/django/utils/datastructures.py b/django/utils/datastructures.py index d94a05dfb4..f81fb88a19 100644 --- a/django/utils/datastructures.py +++ b/django/utils/datastructures.py @@ -1,6 +1,5 @@ import copy import warnings -from types import GeneratorType from django.utils import six @@ -120,27 +119,23 @@ class SortedDict(dict): return instance def __init__(self, data=None): - if data is None: - data = {} - elif isinstance(data, GeneratorType): - # Unfortunately we need to be able to read a generator twice. 
Once - # to get the data into self with our super().__init__ call and a - # second time to setup keyOrder correctly - data = list(data) - super(SortedDict, self).__init__(data) - if isinstance(data, dict): - self.keyOrder = list(data) + if data is None or isinstance(data, dict): + data = data or [] + super(SortedDict, self).__init__(data) + self.keyOrder = list(data) if data else [] else: - self.keyOrder = [] - seen = set() + super(SortedDict, self).__init__() + super_set = super(SortedDict, self).__setitem__ for key, value in data: - if key not in seen: + # Take the ordering from first key + if key not in self: self.keyOrder.append(key) - seen.add(key) + # But override with last value in data (dict() does this) + super_set(key, value) def __deepcopy__(self, memo): return self.__class__([(key, copy.deepcopy(value, memo)) - for key, value in six.iteritems(self)]) + for key, value in self.items()]) def __copy__(self): # The Python's default copy implementation will alter the state @@ -199,13 +194,13 @@ class SortedDict(dict): itervalues = _itervalues def items(self): - return list(self.iteritems()) + return [(k, self[k]) for k in self.keyOrder] def keys(self): - return list(self.iterkeys()) + return self.keyOrder[:] def values(self): - return list(self.itervalues()) + return [self[k] for k in self.keyOrder] def update(self, dict_): for k, v in six.iteritems(dict_): diff --git a/django/utils/encoding.py b/django/utils/encoding.py index b0872471c2..15215849b2 100644 --- a/django/utils/encoding.py +++ b/django/utils/encoding.py @@ -216,7 +216,7 @@ def iri_to_uri(iri): return quote(force_bytes(iri), safe=b"/#%[]=:;$&()+,!?*@'~") def filepath_to_uri(path): - """Convert an file system path to a URI portion that is suitable for + """Convert a file system path to a URI portion that is suitable for inclusion in a URL. We are assuming input is either UTF-8 or unicode already. diff --git a/django/utils/formats.py b/django/utils/formats.py index 555982eede..03b9918edf 100644 --- a/django/utils/formats.py +++ b/django/utils/formats.py @@ -16,6 +16,17 @@ from django.utils.translation import get_language, to_locale, check_for_language _format_cache = {} _format_modules_cache = {} +ISO_INPUT_FORMATS = { + 'DATE_INPUT_FORMATS': ('%Y-%m-%d',), + 'TIME_INPUT_FORMATS': ('%H:%M:%S', '%H:%M'), + 'DATETIME_INPUT_FORMATS': ( + '%Y-%m-%d %H:%M:%S', + '%Y-%m-%d %H:%M:%S.%f', + '%Y-%m-%d %H:%M', + '%Y-%m-%d' + ), +} + def reset_format_cache(): """Clear any cached formats. @@ -82,6 +93,11 @@ def get_format(format_type, lang=None, use_l10n=None): for module in get_format_modules(lang): try: val = getattr(module, format_type) + for iso_input in ISO_INPUT_FORMATS.get(format_type, ()): + if iso_input not in val: + if isinstance(val, tuple): + val = list(val) + val.append(iso_input) _format_cache[cache_key] = val return val except AttributeError: diff --git a/django/utils/functional.py b/django/utils/functional.py index 505931e158..661518e3cc 100644 --- a/django/utils/functional.py +++ b/django/utils/functional.py @@ -5,9 +5,9 @@ import sys from django.utils import six -# You can't trivially replace this `functools.partial` because this binds to -# classes and returns bound instances, whereas functools.partial (on CPython) -# is a type and its instances don't bind. +# You can't trivially replace this with `functools.partial` because this binds +# to classes and returns bound instances, whereas functools.partial (on +# CPython) is a type and its instances don't bind. 
def curry(_curried_func, *args, **kwargs): def _curried(*moreargs, **morekwargs): return _curried_func(*(args+moreargs), **dict(kwargs, **morekwargs)) return _curried @@ -33,8 +33,8 @@ def memoize(func, cache, num_args): class cached_property(object): """ - Decorator that creates converts a method with a single - self argument into a property cached on the instance. + Decorator that converts a method with a single self argument into a + property cached on the instance. """ def __init__(self, func): self.func = func diff --git a/django/utils/html.py b/django/utils/html.py index cc8372906b..25605bea04 100644 --- a/django/utils/html.py +++ b/django/utils/html.py @@ -18,7 +18,7 @@ from django.utils.text import normalize_newlines # Configuration for urlize() function. TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)'] -WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('&lt;', '&gt;')] +WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('&lt;', '&gt;')] # List of possible strings used for bullets in bulleted lists. DOTS = ['&middot;', '*', '\u2022', '&#149;', '&bull;', '&#8226;'] @@ -33,6 +33,7 @@ link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+') html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL) trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z') +strip_tags_re = re.compile(r'</?\S([^=]*=(\s*"[^"]*"|\s*\'[^\']*\'|\S*)|[^>])*?>', re.IGNORECASE) def escape(text): @@ -117,7 +118,7 @@ linebreaks = allow_lazy(linebreaks, six.text_type) def strip_tags(value): """Returns the given HTML with all tags stripped.""" - return re.sub(r'<[^>]*?>', '', force_text(value)) + return strip_tags_re.sub('', force_text(value)) strip_tags = allow_lazy(strip_tags) def remove_tags(html, tags): @@ -149,13 +150,17 @@ fix_ampersands = allow_lazy(fix_ampersands, six.text_type) def smart_urlquote(url): "Quotes a URL if it isn't already quoted." # Handle IDN before quoting. - scheme, netloc, path, query, fragment = urlsplit(url) try: - netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE - except UnicodeError: # invalid domain part + scheme, netloc, path, query, fragment = urlsplit(url) + try: + netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE + except UnicodeError: # invalid domain part + pass + else: + url = urlunsplit((scheme, netloc, path, query, fragment)) + except ValueError: + # invalid IPv6 URL (normally square brackets in hostname part). pass - else: - url = urlunsplit((scheme, netloc, path, query, fragment)) # An URL is considered unquoted if it contains no % characters or # contains a % not followed by two hexadecimal digits. See #9655. diff --git a/django/utils/http.py b/django/utils/http.py index 1c3b0039b5..0ab5198804 100644 --- a/django/utils/http.py +++ b/django/utils/http.py @@ -227,3 +227,15 @@ def same_origin(url1, url2): """ p1, p2 = urllib_parse.urlparse(url1), urllib_parse.urlparse(url2) return (p1.scheme, p1.hostname, p1.port) == (p2.scheme, p2.hostname, p2.port) + +def is_safe_url(url, host=None): + """ + Return ``True`` if the url is a safe redirection (i.e. it doesn't point to + a different host). + + Always returns ``False`` on an empty url. + """ + if not url: + return False + netloc = urllib_parse.urlparse(url)[1] + return not netloc or netloc == host diff --git a/django/utils/log.py b/django/utils/log.py index ea0122794b..736154a178 100644 --- a/django/utils/log.py +++ b/django/utils/log.py @@ -3,6 +3,7 @@ import traceback from django.conf import settings from django.core import mail +from django.core.mail import get_connection from django.views.debug import ExceptionReporter, get_exception_reporter_filter @@ -39,7 +40,7 @@ DEFAULT_LOGGING = { }, }, 'handlers': { - 'console':{ + 'console': { 'level': 'INFO', 'filters': ['require_debug_true'], 'class': 'logging.StreamHandler', @@ -62,6 +63,9 @@ 'level': 'ERROR', 'propagate': False, }, + 'py.warnings': { + 'handlers': ['console'], + }, } } @@ -73,9 +77,10 @@ class AdminEmailHandler(logging.Handler): request data will be provided in the email report.
""" - def __init__(self, include_html=False): + def __init__(self, include_html=False, email_backend=None): logging.Handler.__init__(self) self.include_html = include_html + self.email_backend = email_backend def emit(self, record): try: @@ -107,7 +112,12 @@ class AdminEmailHandler(logging.Handler): message = "%s\n\n%s" % (stack_trace, request_repr) reporter = ExceptionReporter(request, is_email=True, *exc_info) html_message = self.include_html and reporter.get_traceback_html() or None - mail.mail_admins(subject, message, fail_silently=True, html_message=html_message) + mail.mail_admins(subject, message, fail_silently=True, + html_message=html_message, + connection=self.connection()) + + def connection(self): + return get_connection(backend=self.email_backend) def format_subject(self, subject): """ diff --git a/django/utils/translation/trans_real.py b/django/utils/translation/trans_real.py index 9e94840ee0..cf6270cc0c 100644 --- a/django/utils/translation/trans_real.py +++ b/django/utils/translation/trans_real.py @@ -10,6 +10,7 @@ from threading import local from django.utils.importlib import import_module from django.utils.encoding import force_str, force_text +from django.utils._os import upath from django.utils.safestring import mark_safe, SafeData from django.utils import six from django.utils.six import StringIO @@ -109,7 +110,7 @@ def translation(language): from django.conf import settings - globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') + globalpath = os.path.join(os.path.dirname(upath(sys.modules[settings.__module__].__file__)), 'locale') def _fetch(lang, fallback=None): @@ -151,7 +152,7 @@ def translation(language): for appname in reversed(settings.INSTALLED_APPS): app = import_module(appname) - apppath = os.path.join(os.path.dirname(app.__file__), 'locale') + apppath = os.path.join(os.path.dirname(upath(app.__file__)), 'locale') if os.path.isdir(apppath): res = _merge(apppath) @@ -246,7 +247,8 @@ def do_translate(message, translation_function): """ global _default - eol_message = message.replace('\r\n', '\n').replace('\r', '\n') + # str() is allowing a bytestring message to remain bytestring on Python 2 + eol_message = message.replace(str('\r\n'), str('\n')).replace(str('\r'), str('\n')) t = getattr(_active, "value", None) if t is not None: result = getattr(t, translation_function)(eol_message) @@ -336,7 +338,7 @@ def all_locale_paths(): """ from django.conf import settings globalpath = os.path.join( - os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') + os.path.dirname(upath(sys.modules[settings.__module__].__file__)), 'locale') return [globalpath] + list(settings.LOCALE_PATHS) def check_for_language(lang_code): diff --git a/django/utils/tree.py b/django/utils/tree.py index 717181d2b9..ce490224e0 100644 --- a/django/utils/tree.py +++ b/django/utils/tree.py @@ -73,7 +73,9 @@ class Node(object): For truth value testing. """ return bool(self.children) - __nonzero__ = __bool__ # Python 2 + + def __nonzero__(self): # Python 2 compatibility + return type(self).__bool__(self) def __contains__(self, other): """ diff --git a/django/views/generic/base.py b/django/views/generic/base.py index 23e18c54a0..9c82a29d8a 100644 --- a/django/views/generic/base.py +++ b/django/views/generic/base.py @@ -54,13 +54,17 @@ class View(object): "keyword argument to %s(). Don't do that." 
% (key, cls.__name__)) if not hasattr(cls, key): - raise TypeError("%s() received an invalid keyword %r" % ( - cls.__name__, key)) + raise TypeError("%s() received an invalid keyword %r. as_view " + "only accepts arguments that are already " + "attributes of the class." % (cls.__name__, key)) def view(request, *args, **kwargs): self = cls(**initkwargs) if hasattr(self, 'get') and not hasattr(self, 'head'): self.head = self.get + self.request = request + self.args = args + self.kwargs = kwargs return self.dispatch(request, *args, **kwargs) # take name and docstring from class @@ -79,9 +83,6 @@ class View(object): handler = getattr(self, request.method.lower(), self.http_method_not_allowed) else: handler = self.http_method_not_allowed - self.request = request - self.args = args - self.kwargs = kwargs return handler(request, *args, **kwargs) def http_method_not_allowed(self, request, *args, **kwargs): diff --git a/django/views/generic/edit.py b/django/views/generic/edit.py index e51cdf5a3d..97a6c0a698 100644 --- a/django/views/generic/edit.py +++ b/django/views/generic/edit.py @@ -1,6 +1,7 @@ from django.forms import models as model_forms from django.core.exceptions import ImproperlyConfigured from django.http import HttpResponseRedirect +from django.utils.encoding import force_text from django.views.generic.base import TemplateResponseMixin, ContextMixin, View from django.views.generic.detail import (SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView) @@ -50,7 +51,8 @@ class FormMixin(ContextMixin): Returns the supplied success URL. """ if self.success_url: - url = self.success_url + # Forcing possible reverse_lazy evaluation + url = force_text(self.success_url) else: raise ImproperlyConfigured( "No URL to redirect to. Provide a success_url.") diff --git a/django/views/generic/list.py b/django/views/generic/list.py index ec30c58f29..1f286168f6 100644 --- a/django/views/generic/list.py +++ b/django/views/generic/list.py @@ -15,8 +15,10 @@ class MultipleObjectMixin(ContextMixin): queryset = None model = None paginate_by = None + paginate_orphans = 0 context_object_name = None paginator_class = Paginator + page_kwarg = 'page' def get_queryset(self): """ @@ -38,8 +40,11 @@ class MultipleObjectMixin(ContextMixin): """ Paginate the queryset, if needed. 
""" - paginator = self.get_paginator(queryset, page_size, allow_empty_first_page=self.get_allow_empty()) - page = self.kwargs.get('page') or self.request.GET.get('page') or 1 + paginator = self.get_paginator( + queryset, page_size, orphans=self.get_paginate_orphans(), + allow_empty_first_page=self.get_allow_empty()) + page_kwarg = self.page_kwarg + page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1 try: page_number = int(page) except ValueError: @@ -50,9 +55,10 @@ class MultipleObjectMixin(ContextMixin): try: page = paginator.page(page_number) return (paginator, page, page.object_list, page.has_other_pages()) - except InvalidPage: - raise Http404(_('Invalid page (%(page_number)s)') % { - 'page_number': page_number + except InvalidPage as e: + raise Http404(_('Invalid page (%(page_number)s): %(message)s') % { + 'page_number': page_number, + 'message': str(e) }) def get_paginate_by(self, queryset): @@ -61,11 +67,21 @@ class MultipleObjectMixin(ContextMixin): """ return self.paginate_by - def get_paginator(self, queryset, per_page, orphans=0, allow_empty_first_page=True): + def get_paginator(self, queryset, per_page, orphans=0, + allow_empty_first_page=True, **kwargs): """ Return an instance of the paginator for this view. """ - return self.paginator_class(queryset, per_page, orphans=orphans, allow_empty_first_page=allow_empty_first_page) + return self.paginator_class( + queryset, per_page, orphans=orphans, + allow_empty_first_page=allow_empty_first_page, **kwargs) + + def get_paginate_orphans(self): + """ + Returns the maximum number of orphans extend the last page by when + paginating. + """ + return self.paginate_orphans def get_allow_empty(self): """ diff --git a/django/views/i18n.py b/django/views/i18n.py index 00ef224254..c87e3a82db 100644 --- a/django/views/i18n.py +++ b/django/views/i18n.py @@ -8,6 +8,8 @@ from django.utils.translation import check_for_language, activate, to_locale, ge from django.utils.text import javascript_quote from django.utils.encoding import smart_text from django.utils.formats import get_format_modules, get_format +from django.utils._os import upath +from django.utils.http import is_safe_url from django.utils import six def set_language(request): @@ -21,11 +23,11 @@ def set_language(request): redirect to the page in the request (the 'next' parameter) without changing any state. 
""" - next = request.REQUEST.get('next', None) - if not next: - next = request.META.get('HTTP_REFERER', None) - if not next: - next = '/' + next = request.REQUEST.get('next') + if not is_safe_url(url=next, host=request.get_host()): + next = request.META.get('HTTP_REFERER') + if not is_safe_url(url=next, host=request.get_host()): + next = '/' response = http.HttpResponseRedirect(next) if request.method == 'POST': lang_code = request.POST.get('language', None) @@ -99,16 +101,16 @@ function ngettext(singular, plural, count) { function gettext_noop(msgid) { return msgid; } function pgettext(context, msgid) { - var value = gettext(context + '\x04' + msgid); - if (value.indexOf('\x04') != -1) { + var value = gettext(context + '\\x04' + msgid); + if (value.indexOf('\\x04') != -1) { value = msgid; } return value; } function npgettext(context, singular, plural, count) { - var value = ngettext(context + '\x04' + singular, context + '\x04' + plural, count); - if (value.indexOf('\x04') != -1) { + var value = ngettext(context + '\\x04' + singular, context + '\\x04' + plural, count); + if (value.indexOf('\\x04') != -1) { value = ngettext(singular, plural, count); } return value; @@ -197,7 +199,7 @@ def javascript_catalog(request, domain='djangojs', packages=None): # paths of requested packages for package in packages: p = importlib.import_module(package) - path = os.path.join(os.path.dirname(p.__file__), 'locale') + path = os.path.join(os.path.dirname(upath(p.__file__)), 'locale') paths.append(path) # add the filesystem paths listed in the LOCALE_PATHS setting paths.extend(list(reversed(settings.LOCALE_PATHS))) diff --git a/docs/Makefile b/docs/Makefile index bdf48549a3..f6293a8e7f 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -31,6 +31,7 @@ help: @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" + @echo " texinfo to make a Texinfo source file" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @@ -116,6 +117,11 @@ man: @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished; the Texinfo files are in $(BUILDDIR)/texinfo." 
+ gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo diff --git a/docs/_theme/djangodocs/static/djangodocs.css b/docs/_theme/djangodocs/static/djangodocs.css index 4adb8387cc..4efb7e04f3 100644 --- a/docs/_theme/djangodocs/static/djangodocs.css +++ b/docs/_theme/djangodocs/static/djangodocs.css @@ -1,7 +1,7 @@ /*** setup ***/ html { background:#092e20;} body { font:12px/1.5 Verdana,sans-serif; background:#092e20; color: white;} -#custom-doc { width:76.54em;*width:74.69em;min-width:995px; max-width:100em; margin:auto; text-align:left; padding-top:16px; margin-top:0;} +#custom-doc { width:76.54em;*width:74.69em;min-width:995px; max-width:100em; margin:auto; text-align:left; padding-top:16px; margin-top:0;} #hd { padding: 4px 0 12px 0; } #bd { background:#234F32; } #ft { color:#487858; font-size:90%; padding-bottom: 2em; } @@ -54,7 +54,7 @@ hr { color:#ccc; background-color:#ccc; height:1px; border:0; } p, ul, dl { margin-top:.6em; margin-bottom:1em; padding-bottom: 0.1em;} #yui-main div.yui-b img { max-width: 50em; margin-left: auto; margin-right: auto; display: block; } caption { font-size:1em; font-weight:bold; margin-top:0.5em; margin-bottom:0.5em; margin-left: 2px; text-align: center; } -blockquote { padding: 0 1em; margin: 1em 0; font:125%/1.2em "Trebuchet MS", sans-serif; color:#234f32; border-left:2px solid #94da3a; } +blockquote { padding: 0 1em; margin: 1em 0; font:125%/1.2em "Trebuchet MS", sans-serif; color:#234f32; border-left:2px solid #94da3a; } strong { font-weight: bold; } em { font-style: italic; } ins { font-weight: bold; text-decoration: none; } @@ -111,6 +111,7 @@ dt .literal, table .literal { background:none; } .note, .admonition { padding-left:65px; background:url(docicons-note.png) .8em .8em no-repeat;} div.admonition-philosophy { padding-left:65px; background:url(docicons-philosophy.png) .8em .8em no-repeat;} div.admonition-behind-the-scenes { padding-left:65px; background:url(docicons-behindscenes.png) .8em .8em no-repeat;} +.admonition.warning { background:url(docicons-warning.png) .8em .8em no-repeat; border:1px solid #ffc83c;} /*** versoinadded/changes ***/ div.versionadded, div.versionchanged { } diff --git a/docs/_theme/djangodocs/static/docicons-warning.png b/docs/_theme/djangodocs/static/docicons-warning.png new file mode 100644 index 0000000000..031b3e782a Binary files /dev/null and b/docs/_theme/djangodocs/static/docicons-warning.png differ diff --git a/docs/conf.py b/docs/conf.py index 433fd679a1..f58e4ecb2e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -14,12 +14,15 @@ from __future__ import unicode_literals import sys -import os +from os.path import abspath, dirname, join + +# Make sure we get the version of this copy of Django +sys.path.insert(1, dirname(dirname(abspath(__file__)))) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext"))) +sys.path.append(abspath(join(dirname(__file__), "_ext"))) # -- General configuration ----------------------------------------------------- @@ -52,11 +55,23 @@ copyright = 'Django Software Foundation and contributors' # built documents. # # The short X.Y version. -version = '1.5' +version = '1.6' # The full version, including alpha/beta/rc tags. 
-release = '1.5' +try: + from django import VERSION, get_version +except ImportError: + release = version +else: + def django_release(): + pep386ver = get_version() + if VERSION[3:5] == ('alpha', 0) and 'dev' not in pep386ver: + return pep386ver + '.dev' + return pep386ver + + release = django_release() + # The next version to be released -django_next_version = '1.6' +django_next_version = '1.7' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -233,6 +248,16 @@ man_pages = [ ] +# -- Options for Texinfo output ------------------------------------------------ + +# List of tuples (startdocname, targetname, title, author, dir_entry, +# description, category, toctree_only) +texinfo_documents=[( + master_doc, "django", "", "", "Django", + "Documentation of the Django framework", "Web development", False +)] + + # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. diff --git a/docs/faq/admin.txt b/docs/faq/admin.txt index 872ad254c9..30d452cbe2 100644 --- a/docs/faq/admin.txt +++ b/docs/faq/admin.txt @@ -10,8 +10,8 @@ things: * Set the :setting:`SESSION_COOKIE_DOMAIN` setting in your admin config file to match your domain. For example, if you're going to - "http://www.example.com/admin/" in your browser, in - "myproject.settings" you should set ``SESSION_COOKIE_DOMAIN = 'www.example.com'``. + "http://www.example.com/admin/" in your browser, in "myproject.settings" you + should set :setting:`SESSION_COOKIE_DOMAIN` = 'www.example.com'. * Some browsers (Firefox?) don't like to accept cookies from domains that don't have dots in them. If you're running the admin site on "localhost" @@ -23,8 +23,9 @@ I can't log in. When I enter a valid username and password, it brings up the log ----------------------------------------------------------------------------------------------------------------------------------------------------------- If you're sure your username and password are correct, make sure your user -account has ``is_active`` and ``is_staff`` set to True. The admin site only -allows access to users with those two fields both set to True. +account has :attr:`~django.contrib.auth.models.User.is_active` and +:attr:`~django.contrib.auth.models.User.is_staff` set to True. The admin site +only allows access to users with those two fields both set to True. How can I prevent the cache middleware from caching the admin site? ------------------------------------------------------------------- @@ -64,9 +65,10 @@ My "list_filter" contains a ManyToManyField, but the filter doesn't display. Django won't bother displaying the filter for a ``ManyToManyField`` if there are fewer than two related objects. -For example, if your ``list_filter`` includes ``sites``, and there's only one -site in your database, it won't display a "Site" filter. In that case, -filtering by site would be meaningless. +For example, if your :attr:`~django.contrib.admin.ModelAdmin.list_filter` +includes :doc:`sites </ref/contrib/sites>`, and there's only one site in your +database, it won't display a "Site" filter. In that case, filtering by site +would be meaningless. Some objects aren't appearing in the admin. ------------------------------------------- @@ -85,9 +87,10 @@ How can I customize the functionality of the admin interface? You've got several options.
If you want to piggyback on top of an add/change form that Django automatically generates, you can attach arbitrary JavaScript -modules to the page via the model's ``class Admin`` ``js`` parameter. That -parameter is a list of URLs, as strings, pointing to JavaScript modules that -will be included within the admin form via a ``<script>`` tag.
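For orientation, a minimal sketch of the mechanism this FAQ answer describes, attaching extra JavaScript to an admin add/change form through a ModelAdmin media declaration (the app, model, and static file path below are hypothetical; this is an illustration, not part of the patch):

from django.contrib import admin

from myapp.models import Article  # hypothetical app and model


class ArticleAdmin(admin.ModelAdmin):
    class Media:
        # Paths are resolved against STATIC_URL and rendered into the
        # add/change page as script includes.
        js = ('js/article-admin-extras.js',)


admin.site.register(Article, ArticleAdmin)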
diff --git a/tests/regressiontests/views/tests/__init__.py index 12d0c59014..17f3f4562d 100644 --- a/tests/regressiontests/views/tests/__init__.py +++ b/tests/regressiontests/views/tests/__init__.py @@ -4,7 +4,7 @@ from .debug import (DebugViewTests, ExceptionReporterTests, ExceptionReporterTests, PlainTextReportTests, ExceptionReporterFilterTests, AjaxResponseExceptionReporterFilter) from .defaults import DefaultsTests -from .i18n import JsI18NTests, I18NTests, JsI18NTestsMultiPackage +from .i18n import JsI18NTests, I18NTests, JsI18NTestsMultiPackage, JavascriptI18nTests from .shortcuts import ShortcutTests from .specials import URLHandling from .static import StaticHelperTest, StaticUtilsTests, StaticTests diff --git a/tests/regressiontests/views/tests/debug.py index e616d184b8..4fdaad5010 100644 --- a/tests/regressiontests/views/tests/debug.py +++ b/tests/regressiontests/views/tests/debug.py @@ -14,6 +14,7 @@ from django.core.urlresolvers import reverse from django.test import TestCase, RequestFactory from django.test.utils import (override_settings, setup_test_template_loader, restore_template_loaders) +from django.utils.encoding import force_text from django.views.debug import ExceptionReporter from .. import BrokenException, except_args @@ -306,15 +307,16 @@ class ExceptionReportTestMixin(object): self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. - self.assertNotIn('cooked_eggs', email.body) - self.assertNotIn('scrambled', email.body) - self.assertNotIn('sauce', email.body) - self.assertNotIn('worcestershire', email.body) + body = force_text(email.body) + self.assertNotIn('cooked_eggs', body) + self.assertNotIn('scrambled', body) + self.assertNotIn('sauce', body) + self.assertNotIn('worcestershire', body) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. - self.assertIn(k, email.body) - self.assertIn(v, email.body) + self.assertIn(k, body) + self.assertIn(v, body) def verify_safe_email(self, view, check_for_POST_params=True): """ @@ -327,20 +329,21 @@ class ExceptionReportTestMixin(object): self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. - self.assertNotIn('cooked_eggs', email.body) - self.assertNotIn('scrambled', email.body) - self.assertNotIn('sauce', email.body) - self.assertNotIn('worcestershire', email.body) + body = force_text(email.body) + self.assertNotIn('cooked_eggs', body) + self.assertNotIn('scrambled', body) + self.assertNotIn('sauce', body) + self.assertNotIn('worcestershire', body) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. - self.assertIn(k, email.body) + self.assertIn(k, body) # Non-sensitive POST parameters' values are shown. - self.assertIn('baked-beans-value', email.body) - self.assertIn('hash-brown-value', email.body) + self.assertIn('baked-beans-value', body) + self.assertIn('hash-brown-value', body) # Sensitive POST parameters' values are not shown.
- self.assertNotIn('sausage-value', email.body) - self.assertNotIn('bacon-value', email.body) + self.assertNotIn('sausage-value', body) + self.assertNotIn('bacon-value', body) def verify_paranoid_email(self, view): """ @@ -353,15 +356,16 @@ class ExceptionReportTestMixin(object): self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. - self.assertNotIn('cooked_eggs', email.body) - self.assertNotIn('scrambled', email.body) - self.assertNotIn('sauce', email.body) - self.assertNotIn('worcestershire', email.body) + body = force_text(email.body) + self.assertNotIn('cooked_eggs', body) + self.assertNotIn('scrambled', body) + self.assertNotIn('sauce', body) + self.assertNotIn('worcestershire', body) for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. - self.assertIn(k, email.body) + self.assertIn(k, body) # No POST parameters' values are shown. - self.assertNotIn(v, email.body) + self.assertNotIn(v, body) class ExceptionReporterFilterTests(TestCase, ExceptionReportTestMixin): diff --git a/tests/regressiontests/views/tests/i18n.py b/tests/regressiontests/views/tests/i18n.py index 601df6d512..b1dc8808a1 100644 --- a/tests/regressiontests/views/tests/i18n.py +++ b/tests/regressiontests/views/tests/i18n.py @@ -6,11 +6,18 @@ from os import path from django.conf import settings from django.core.urlresolvers import reverse -from django.test import TestCase -from django.utils import six -from django.utils.translation import override, get_language +from django.test import LiveServerTestCase, TestCase +from django.test.utils import override_settings +from django.utils import six, unittest +from django.utils._os import upath +from django.utils.translation import override from django.utils.text import javascript_quote +try: + from selenium.webdriver.firefox import webdriver as firefox +except ImportError: + firefox = None + from ..urls import locale_dir @@ -18,13 +25,28 @@ class I18NTests(TestCase): """ Tests django views in django/views/i18n.py """ def test_setlang(self): - """The set_language view can be used to change the session language""" + """ + The set_language view can be used to change the session language. + + The user is redirected to the 'next' argument if provided. + """ for lang_code, lang_name in settings.LANGUAGES: post_data = dict(language=lang_code, next='/views/') response = self.client.post('/views/i18n/setlang/', data=post_data) self.assertRedirects(response, 'http://testserver/views/') self.assertEqual(self.client.session['django_language'], lang_code) + def test_setlang_unsafe_next(self): + """ + The set_language view only redirects to the 'next' argument if it is + "safe". 
+ """ + lang_code, lang_name = settings.LANGUAGES[0] + post_data = dict(language=lang_code, next='//unsafe/redirection/') + response = self.client.post('/views/i18n/setlang/', data=post_data) + self.assertEqual(response['Location'], 'http://testserver/') + self.assertEqual(self.client.session['django_language'], lang_code) + def test_setlang_reversal(self): self.assertEqual(reverse('set_language'), '/views/i18n/setlang/') @@ -146,9 +168,49 @@ class JsI18NTestsMultiPackage(TestCase): def testI18NWithLocalePaths(self): extended_locale_paths = settings.LOCALE_PATHS + ( path.join(path.dirname( - path.dirname(path.abspath(__file__))), 'app3', 'locale'),) + path.dirname(path.abspath(upath(__file__)))), 'app3', 'locale'),) with self.settings(LANGUAGE_CODE='es-ar', LOCALE_PATHS=extended_locale_paths): with override('es-ar'): response = self.client.get('/views/jsi18n/') self.assertContains(response, javascript_quote('este texto de app3 debe ser traducido')) + + +@unittest.skipUnless(firefox, 'Selenium not installed') +class JavascriptI18nTests(LiveServerTestCase): + urls = 'regressiontests.views.urls' + + @classmethod + def setUpClass(cls): + cls.selenium = firefox.WebDriver() + super(JavascriptI18nTests, cls).setUpClass() + + @classmethod + def tearDownClass(cls): + cls.selenium.quit() + super(JavascriptI18nTests, cls).tearDownClass() + + @override_settings(LANGUAGE_CODE='de') + def test_javascript_gettext(self): + extended_apps = list(settings.INSTALLED_APPS) + ['regressiontests.views'] + with self.settings(INSTALLED_APPS=extended_apps): + self.selenium.get('%s%s' % (self.live_server_url, '/jsi18n_template/')) + + elem = self.selenium.find_element_by_id("gettext") + self.assertEqual(elem.text, "Entfernen") + elem = self.selenium.find_element_by_id("ngettext_sing") + self.assertEqual(elem.text, "1 Element") + elem = self.selenium.find_element_by_id("ngettext_plur") + self.assertEqual(elem.text, "455 Elemente") + elem = self.selenium.find_element_by_id("pgettext") + self.assertEqual(elem.text, "Kann") + elem = self.selenium.find_element_by_id("npgettext_sing") + self.assertEqual(elem.text, "1 Resultat") + elem = self.selenium.find_element_by_id("npgettext_plur") + self.assertEqual(elem.text, "455 Resultate") + + def test_escaping(self): + extended_apps = list(settings.INSTALLED_APPS) + ['regressiontests.views'] + with self.settings(INSTALLED_APPS=extended_apps): + response = self.client.get('%s%s' % (self.live_server_url, '/jsi18n_admin/')) + self.assertContains(response, '\\x04') diff --git a/tests/regressiontests/views/urls.py b/tests/regressiontests/views/urls.py index 90d2382f71..2c06557ae9 100644 --- a/tests/regressiontests/views/urls.py +++ b/tests/regressiontests/views/urls.py @@ -4,11 +4,12 @@ from __future__ import absolute_import from os import path from django.conf.urls import patterns, url, include +from django.utils._os import upath from . 
import views -base_dir = path.dirname(path.abspath(__file__)) +base_dir = path.dirname(path.abspath(upath(__file__))) media_dir = path.join(base_dir, 'media') locale_dir = path.join(base_dir, 'locale') @@ -32,6 +33,11 @@ js_info_dict_multi_packages2 = { 'packages': ('regressiontests.views.app3', 'regressiontests.views.app4'), } +js_info_dict_admin = { + 'domain': 'djangojs', + 'packages': ('django.contrib.admin', 'regressiontests.views'), +} + urlpatterns = patterns('', (r'^$', views.index_page), @@ -51,6 +57,8 @@ urlpatterns = patterns('', (r'^jsi18n_english_translation/$', 'django.views.i18n.javascript_catalog', js_info_dict_english_translation), (r'^jsi18n_multi_packages1/$', 'django.views.i18n.javascript_catalog', js_info_dict_multi_packages1), (r'^jsi18n_multi_packages2/$', 'django.views.i18n.javascript_catalog', js_info_dict_multi_packages2), + (r'^jsi18n_admin/$', 'django.views.i18n.javascript_catalog', js_info_dict_admin), + (r'^jsi18n_template/$', views.jsi18n), # Static views (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': media_dir}), diff --git a/tests/regressiontests/views/views.py b/tests/regressiontests/views/views.py index 2836d1bdde..ed9d61144a 100644 --- a/tests/regressiontests/views/views.py +++ b/tests/regressiontests/views/views.py @@ -51,6 +51,9 @@ def template_exception(request, n): return render_to_response('debug/template_exception.html', {'arg': except_args[int(n)]}) +def jsi18n(request): + return render_to_response('jsi18n.html') + # Some views to exercise the shortcuts def render_to_response_view(request): diff --git a/tests/runtests.py b/tests/runtests.py index a81fee6858..8c56e273b5 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -7,6 +7,7 @@ import tempfile import warnings from django import contrib +from django.utils._os import upath from django.utils import six # databrowse is deprecated, but we still want to run its tests @@ -19,8 +20,8 @@ REGRESSION_TESTS_DIR_NAME = 'regressiontests' TEST_TEMPLATE_DIR = 'templates' -RUNTESTS_DIR = os.path.dirname(__file__) -CONTRIB_DIR = os.path.dirname(contrib.__file__) +RUNTESTS_DIR = os.path.dirname(upath(__file__)) +CONTRIB_DIR = os.path.dirname(upath(contrib.__file__)) MODEL_TEST_DIR = os.path.join(RUNTESTS_DIR, MODEL_TESTS_DIR_NAME) REGRESSION_TEST_DIR = os.path.join(RUNTESTS_DIR, REGRESSION_TESTS_DIR_NAME) TEMP_DIR = tempfile.mkdtemp(prefix='django_') @@ -192,7 +193,7 @@ def bisect_tests(bisection_label, options, test_labels): pass subprocess_args = [ - sys.executable, __file__, '--settings=%s' % options.settings] + sys.executable, upath(__file__), '--settings=%s' % options.settings] if options.failfast: subprocess_args.append('--failfast') if options.verbosity: @@ -253,7 +254,7 @@ def paired_tests(paired_test, options, test_labels): pass subprocess_args = [ - sys.executable, __file__, '--settings=%s' % options.settings] + sys.executable, upath(__file__), '--settings=%s' % options.settings] if options.failfast: subprocess_args.append('--failfast') if options.verbosity: @@ -277,7 +278,7 @@ if __name__ == "__main__": usage = "%prog [options] [module module module ...]" parser = OptionParser(usage=usage) parser.add_option( - '-v','--verbosity', action='store', dest='verbosity', default='1', + '-v', '--verbosity', action='store', dest='verbosity', default='1', type='choice', choices=['0', '1', '2', '3'], help='Verbosity level; 0=minimal output, 1=normal output, 2=all ' 'output')
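As an aside, the upath() helper introduced in django/utils/_os.py earlier in this diff is what makes the __file__-based path computations above safe on Python 2, where __file__ is a bytestring. A minimal sketch of the pattern the diff applies throughout (the directory names here are illustrative, not taken from the patch):

import os

from django.utils._os import upath

# upath() decodes a bytestring path with the filesystem encoding on Python 2
# and returns the path unchanged on Python 3, so the result can be joined
# with unicode fragments without raising UnicodeDecodeError.
HERE = os.path.dirname(os.path.abspath(upath(__file__)))
TEMPLATE_DIR = os.path.join(HERE, 'templates')
LOCALE_DIR = os.path.join(HERE, 'locale')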
@@ -22,7 +22,7 @@
{{ model.name }}