diff --git a/django/apps/registry.py b/django/apps/registry.py
index 92de6075fc..4b727e157a 100644
--- a/django/apps/registry.py
+++ b/django/apps/registry.py
@@ -288,9 +288,9 @@ class Apps:
referred model is not swappable, return None.
This method is decorated with @functools.cache because it's performance
- critical when it comes to migrations. Since the swappable settings don't
- change after Django has loaded the settings, there is no reason to get
- the respective settings attribute over and over again.
+ critical when it comes to migrations. Since the swappable settings
+ don't change after Django has loaded the settings, there is no reason
+ to get the respective settings attribute over and over again.
"""
to_string = to_string.lower()
for model in self.get_models(include_swapped=True):
@@ -378,8 +378,9 @@ class Apps:
# the relation tree and the fields cache.
self.get_models.cache_clear()
if self.ready:
- # Circumvent self.get_models() to prevent that the cache is refilled.
- # This particularly prevents that an empty value is cached while cloning.
+ # Circumvent self.get_models() to prevent that the cache is
+ # refilled. This particularly prevents that an empty value is
+ # cached while cloning.
for app_config in self.app_configs.values():
for model in app_config.get_models(include_auto_created=True):
model._meta._expire_cache()
diff --git a/django/conf/__init__.py b/django/conf/__init__.py
index 6b5f044e34..c7ae36aba0 100644
--- a/django/conf/__init__.py
+++ b/django/conf/__init__.py
@@ -77,7 +77,8 @@ class LazySettings(LazyObject):
val = getattr(_wrapped, name)
# Special case some settings which require further modification.
- # This is done here for performance reasons so the modified value is cached.
+ # This is done here for performance reasons so the modified value is
+ # cached.
if name in {"MEDIA_URL", "STATIC_URL"} and val is not None:
val = self._add_script_prefix(val)
elif name == "SECRET_KEY" and not val:
@@ -149,7 +150,8 @@ class LazySettings(LazyObject):
class Settings:
def __init__(self, settings_module):
- # update this dict from global settings (but only for ALL_CAPS settings)
+ # update this dict from global settings (but only for ALL_CAPS
+ # settings)
for setting in dir(global_settings):
if setting.isupper():
setattr(self, setting, getattr(global_settings, setting))
diff --git a/django/conf/global_settings.py b/django/conf/global_settings.py
index 25ac49becf..ffbe5d3980 100644
--- a/django/conf/global_settings.py
+++ b/django/conf/global_settings.py
@@ -317,9 +317,9 @@ DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
# before a SuspiciousOperation (TooManyFilesSent) is raised.
DATA_UPLOAD_MAX_NUMBER_FILES = 100
-# Directory in which upload streamed files will be temporarily saved. A value of
-# `None` will make Django use the operating system's default temporary directory
-# (i.e. "/tmp" on *nix systems).
+# Directory in which upload streamed files will be temporarily saved. A value
+# of `None` will make Django use the operating system's default temporary
+# directory (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
@@ -327,9 +327,9 @@ FILE_UPLOAD_TEMP_DIR = None
# https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_PERMISSIONS = 0o644
-# The numeric mode to assign to newly-created directories, when uploading files.
-# The value should be a mode as you'd pass to os.chmod;
-# see https://docs.python.org/library/os.html#files-and-directories.
+# The numeric mode to assign to newly-created directories, when uploading
+# files. The value should be a mode as you'd pass to os.chmod; see
+# https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where user will place custom format definition.
@@ -342,7 +342,8 @@ FORMAT_MODULE_PATH = None
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "N j, Y"
-# Default formatting for datetime objects. See all available format strings here:
+# Default formatting for datetime objects. See all available format strings
+# here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = "N j, Y, P"
@@ -350,8 +351,8 @@ DATETIME_FORMAT = "N j, Y, P"
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = "P"
-# Default formatting for date objects when only the year and month are relevant.
-# See all available format strings here:
+# Default formatting for date objects when only the year and month are
+# relevant. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = "F Y"
@@ -360,7 +361,8 @@ YEAR_MONTH_FORMAT = "F Y"
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = "F j"
-# Default short formatting for date objects. See all available format strings here:
+# Default short formatting for date objects. See all available format strings
+# here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = "m/d/Y"
diff --git a/django/conf/locale/__init__.py b/django/conf/locale/__init__.py
index 6ac7bd3bdb..04962042b3 100644
--- a/django/conf/locale/__init__.py
+++ b/django/conf/locale/__init__.py
@@ -1,10 +1,11 @@
"""
-LANG_INFO is a dictionary structure to provide meta information about languages.
+LANG_INFO is a dictionary structure to provide meta information about
+languages.
About name_local: capitalize it as if your language name was appearing
inside a sentence in your language.
-The 'fallback' key can be used to specify a special fallback logic which doesn't
-follow the traditional 'fr-ca' -> 'fr' fallback logic.
+The 'fallback' key can be used to specify a special fallback logic which
+doesn't follow the traditional 'fr-ca' -> 'fr' fallback logic.
"""
LANG_INFO = {
diff --git a/django/contrib/admin/actions.py b/django/contrib/admin/actions.py
index 865c16aff2..04a906542a 100644
--- a/django/contrib/admin/actions.py
+++ b/django/contrib/admin/actions.py
@@ -24,7 +24,8 @@ def delete_selected(modeladmin, request, queryset):
deletable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
- Next, it deletes all selected objects and redirects back to the change list.
+ Next, it deletes all selected objects and redirects back to the change
+ list.
"""
opts = modeladmin.model._meta
app_label = opts.app_label
diff --git a/django/contrib/admin/checks.py b/django/contrib/admin/checks.py
index 775bb12145..10257a54bf 100644
--- a/django/contrib/admin/checks.py
+++ b/django/contrib/admin/checks.py
@@ -316,7 +316,8 @@ class BaseModelAdminChecks:
def _check_fields(self, obj):
"""Check that `fields` only refer to existing fields, doesn't contain
- duplicates. Check if at most one of `fields` and `fieldsets` is defined.
+ duplicates. Check if at most one of `fields` and `fieldsets` is
+ defined.
"""
if obj.fields is None:
diff --git a/django/contrib/admin/options.py b/django/contrib/admin/options.py
index c3ccc6c4fe..6c202c8e61 100644
--- a/django/contrib/admin/options.py
+++ b/django/contrib/admin/options.py
@@ -170,10 +170,10 @@ class BaseModelAdmin(metaclass=forms.MediaDefiningClass):
# ForeignKey or ManyToManyFields
if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
- # Combine the field kwargs with any options for formfield_overrides.
- # Make sure the passed in **kwargs override anything in
- # formfield_overrides because **kwargs is more specific, and should
- # always win.
+ # Combine the field kwargs with any options for
+ # formfield_overrides. Make sure the passed in **kwargs override
+ # anything in formfield_overrides because **kwargs is more
+ # specific, and should always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = {**self.formfield_overrides[db_field.__class__], **kwargs}
@@ -2182,8 +2182,8 @@ class ModelAdmin(BaseModelAdmin):
if obj is None:
return self._get_obj_does_not_exist_redirect(request, self.opts, object_id)
- # Populate deleted_objects, a data structure of all related objects that
- # will also be deleted.
+ # Populate deleted_objects, a data structure of all related objects
+ # that will also be deleted.
(
deleted_objects,
model_count,
diff --git a/django/contrib/admin/sites.py b/django/contrib/admin/sites.py
index 9c9aa21f57..17af19fd1b 100644
--- a/django/contrib/admin/sites.py
+++ b/django/contrib/admin/sites.py
@@ -29,11 +29,11 @@ all_sites = WeakSet()
class AdminSite:
"""
- An AdminSite object encapsulates an instance of the Django admin application, ready
- to be hooked in to your URLconf. Models are registered with the AdminSite using the
- register() method, and the get_urls() method can then be used to access Django view
- functions that present a full admin interface for the collection of registered
- models.
+ An AdminSite object encapsulates an instance of the Django admin
+ application, ready to be hooked in to your URLconf. Models are registered
+ with the AdminSite using the register() method, and the get_urls() method
+ can then be used to access Django view functions that present a full admin
+ interface for the collection of registered models.
"""
    # Text to put at the end of each page's <title>.
@@ -136,9 +136,9 @@ class AdminSite:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
- # For reasons I don't quite understand, without a __module__
- # the created class appears to "live" in the wrong place,
- # which causes issues later on.
+ # For reasons I don't quite understand, without a
+ # __module__ the created class appears to "live" in the
+ # wrong place, which causes issues later on.
options["__module__"] = __name__
admin_class = type(
"%sAdmin" % model.__name__, (admin_class,), options
diff --git a/django/contrib/admin/templatetags/admin_list.py b/django/contrib/admin/templatetags/admin_list.py
index 1e6f8bf298..43d87c0024 100644
--- a/django/contrib/admin/templatetags/admin_list.py
+++ b/django/contrib/admin/templatetags/admin_list.py
@@ -256,7 +256,8 @@ def items_for_result(cl, result, form):
):
row_classes.append("nowrap")
row_class = mark_safe(' class="%s"' % " ".join(row_classes))
- # If list_display_links not defined, add the link tag to the first field
+ # If list_display_links not defined, add the link tag to the first
+ # field
if link_to_changelist:
table_tag = "th" if first else "td"
first = False
@@ -293,9 +294,9 @@ def items_for_result(cl, result, form):
"<{}{}>{}{}>", table_tag, row_class, link_or_text, table_tag
)
else:
- # By default the fields come from ModelAdmin.list_editable, but if we pull
- # the fields out of the form instead of list_editable custom admins
- # can provide fields on a per request basis
+ # By default the fields come from ModelAdmin.list_editable, but if
+ # we pull the fields out of the form instead of list_editable
+ # custom admins can provide fields on a per request basis
if (
form
and field_name in form.fields
diff --git a/django/contrib/admin/tests.py b/django/contrib/admin/tests.py
index b95a37b959..bd9c76b5fd 100644
--- a/django/contrib/admin/tests.py
+++ b/django/contrib/admin/tests.py
@@ -218,9 +218,9 @@ class AdminSeleniumTestCase(SeleniumTestCase, StaticLiveServerTestCase):
actual_values.append(option.get_attribute("value"))
self.assertEqual(values, actual_values)
else:
- # Prevent the `find_elements(By.CSS_SELECTOR, …)` call from blocking
- # if the selector doesn't match any options as we expect it
- # to be the case.
+ # Prevent the `find_elements(By.CSS_SELECTOR, …)` call from
+ # blocking if the selector doesn't match any options as we expect
+ # it to be the case.
with self.disable_implicit_wait():
self.wait_until(
lambda driver: not driver.find_elements(
diff --git a/django/contrib/admin/utils.py b/django/contrib/admin/utils.py
index eec93fa4be..74bd571e56 100644
--- a/django/contrib/admin/utils.py
+++ b/django/contrib/admin/utils.py
@@ -73,7 +73,8 @@ def prepare_lookup_value(key, value, separator=","):
# if key ends with __in, split parameter into separate values
if key.endswith("__in"):
value = value.split(separator)
- # if key ends with __isnull, special case '' and the string literals 'false' and '0'
+ # if key ends with __isnull, special case '' and the string literals
+ # 'false' and '0'
elif key.endswith("__isnull"):
value = value.lower() not in ("", "false", "0")
return value
@@ -558,9 +559,10 @@ def construct_change_message(form, formsets, add):
Translations are deactivated so that strings are stored untranslated.
Translation happens later on LogEntry access.
"""
- # Evaluating `form.changed_data` prior to disabling translations is required
- # to avoid fields affected by localization from being included incorrectly,
- # e.g. where date formats differ such as MM/DD/YYYY vs DD/MM/YYYY.
+ # Evaluating `form.changed_data` prior to disabling translations is
+ # required to avoid fields affected by localization from being included
+ # incorrectly, e.g. where date formats differ such as MM/DD/YYYY vs
+ # DD/MM/YYYY.
changed_data = form.changed_data
with translation_override(None):
# Deactivate translations while fetching verbose_name for form
diff --git a/django/contrib/admin/views/main.py b/django/contrib/admin/views/main.py
index ed6c6f9219..8c9118808e 100644
--- a/django/contrib/admin/views/main.py
+++ b/django/contrib/admin/views/main.py
@@ -322,7 +322,8 @@ class ChangeList:
self.result_count = result_count
self.show_full_result_count = self.model_admin.show_full_result_count
# Admin actions are shown if there is at least one entry
- # or if entries are not counted because show_full_result_count is disabled
+ # or if entries are not counted because show_full_result_count is
+ # disabled
self.show_admin_actions = not self.show_full_result_count or bool(
full_result_count
)
@@ -485,8 +486,8 @@ class ChangeList:
"""
Return a dictionary of ordering field column numbers and asc/desc.
"""
- # We must cope with more than one column having the same underlying sort
- # field, so we base things on column numbers.
+ # We must cope with more than one column having the same underlying
+ # sort field, so we base things on column numbers.
ordering = self._get_default_ordering()
ordering_fields = {}
if ORDER_VAR not in self.params:
diff --git a/django/contrib/admin/widgets.py b/django/contrib/admin/widgets.py
index fc83267e43..a601fc2667 100644
--- a/django/contrib/admin/widgets.py
+++ b/django/contrib/admin/widgets.py
@@ -215,8 +215,8 @@ class ForeignKeyRawIdWidget(forms.TextInput):
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
"""
- A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
- in a box.
+ A Widget for displaying ManyToMany ids in the "raw_id" interface rather
+ than in a box.
"""
template_name = "admin/widgets/many_to_many_raw_id.html"
@@ -275,7 +275,8 @@ class RelatedFieldWidgetWrapper(forms.Widget):
if not isinstance(widget, AutocompleteMixin):
self.attrs["data-context"] = "available-source"
self.can_change_related = not multiple and can_change_related
- # XXX: The deletion UX can be confusing when dealing with cascading deletion.
+ # XXX: The deletion UX can be confusing when dealing with cascading
+ # deletion.
cascade = getattr(rel, "on_delete", None) is CASCADE
self.can_delete_related = not multiple and not cascade and can_delete_related
self.can_view_related = not multiple and can_view_related
diff --git a/django/contrib/admindocs/utils.py b/django/contrib/admindocs/utils.py
index 6603f4dcd5..f8e329a79d 100644
--- a/django/contrib/admindocs/utils.py
+++ b/django/contrib/admindocs/utils.py
@@ -69,8 +69,8 @@ def parse_rst(text, default_reference_context, thing_being_parsed=None):
"file_insertion_enabled": False,
}
thing_being_parsed = thing_being_parsed and "<%s>" % thing_being_parsed
- # Wrap ``text`` in some reST that sets the default role to ``cmsreference``,
- # then restores it.
+ # Wrap ``text`` in some reST that sets the default role to
+ # ``cmsreference``, then restores it.
source = """
.. default-role:: cmsreference
diff --git a/django/contrib/auth/mixins.py b/django/contrib/auth/mixins.py
index 1f2e95ff00..9a59043897 100644
--- a/django/contrib/auth/mixins.py
+++ b/django/contrib/auth/mixins.py
@@ -33,7 +33,8 @@ class AccessMixin:
def get_permission_denied_message(self):
"""
- Override this method to override the permission_denied_message attribute.
+ Override this method to override the permission_denied_message
+ attribute.
"""
return self.permission_denied_message
diff --git a/django/contrib/auth/password_validation.py b/django/contrib/auth/password_validation.py
index 38e7c5c3a8..690be58700 100644
--- a/django/contrib/auth/password_validation.py
+++ b/django/contrib/auth/password_validation.py
@@ -227,7 +227,8 @@ class CommonPasswordValidator:
The password is rejected if it occurs in a provided list of passwords,
which may be gzipped. The list Django ships with contains 20000 common
- passwords (unhexed, lowercased and deduplicated), created by Royce Williams:
+ passwords (unhexed, lowercased and deduplicated), created by Royce
+ Williams:
https://gist.github.com/roycewilliams/226886fd01572964e1431ac8afc999ce
The password list must be lowercased to match the comparison in validate().
"""
diff --git a/django/contrib/auth/urls.py b/django/contrib/auth/urls.py
index 699ba6179a..8365be689b 100644
--- a/django/contrib/auth/urls.py
+++ b/django/contrib/auth/urls.py
@@ -1,7 +1,7 @@
# The views used below are normally mapped in the AdminSite instance.
-# This URLs file is used to provide a reliable view deployment for test purposes.
-# It is also provided as a convenience to those who want to deploy these URLs
-# elsewhere.
+# This URLs file is used to provide a reliable view deployment for test
+# purposes. It is also provided as a convenience to those who want to deploy
+# these URLs elsewhere.
from django.contrib.auth import views
from django.urls import path
diff --git a/django/contrib/contenttypes/admin.py b/django/contrib/contenttypes/admin.py
index 617d6d2e81..f595ce5285 100644
--- a/django/contrib/contenttypes/admin.py
+++ b/django/contrib/contenttypes/admin.py
@@ -67,8 +67,8 @@ class GenericInlineModelAdminChecks(InlineModelAdminChecks):
)
]
- # There's one or more GenericForeignKeys; make sure that one of them
- # uses the right ct_field and ct_fk_field.
+ # There's one or more GenericForeignKeys; make sure that one of
+ # them uses the right ct_field and ct_fk_field.
for gfk in gfks:
if gfk.ct_field == obj.ct_field and gfk.fk_field == obj.ct_fk_field:
return []
diff --git a/django/contrib/contenttypes/fields.py b/django/contrib/contenttypes/fields.py
index f28c346576..d85b61933a 100644
--- a/django/contrib/contenttypes/fields.py
+++ b/django/contrib/contenttypes/fields.py
@@ -199,8 +199,9 @@ class GenericForeignKey(FieldCacheMixin, Field):
ct = self.get_content_type(id=ct_id, using=instance._state.db)
ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))
- # For doing the join in Python, we have to match both the FK val and the
- # content type, so we use a callable that returns a (fk, class) pair.
+ # For doing the join in Python, we have to match both the FK val and
+ # the content type, so we use a callable that returns a (fk, class)
+ # pair.
def gfk_key(obj):
ct_id = getattr(obj, ct_attname)
if ct_id is None:
diff --git a/django/contrib/gis/db/backends/base/features.py b/django/contrib/gis/db/backends/base/features.py
index 22c90a1714..d1e2211bdc 100644
--- a/django/contrib/gis/db/backends/base/features.py
+++ b/django/contrib/gis/db/backends/base/features.py
@@ -8,10 +8,12 @@ from .operations import BaseSpatialOperations
class BaseSpatialFeatures:
gis_enabled = True
- # Does the database contain a SpatialRefSys model to store SRID information?
+ # Does the database contain a SpatialRefSys model to store SRID
+ # information?
has_spatialrefsys_table = True
- # Does the backend support the django.contrib.gis.utils.add_srs_entry() utility?
+ # Does the backend support the django.contrib.gis.utils.add_srs_entry()
+ # utility?
supports_add_srs_entry = True
# Does the backend introspect GeometryField to its subtypes?
supports_geometry_field_introspection = True
diff --git a/django/contrib/gis/db/backends/base/operations.py b/django/contrib/gis/db/backends/base/operations.py
index 5c09aaa8bc..b3bf4383ad 100644
--- a/django/contrib/gis/db/backends/base/operations.py
+++ b/django/contrib/gis/db/backends/base/operations.py
@@ -28,8 +28,8 @@ class BaseSpatialOperations:
geom_func_prefix = ""
- # Mapping between Django function names and backend names, when names do not
- # match; used in spatial_function_name().
+ # Mapping between Django function names and backend names, when names do
+ # not match; used in spatial_function_name().
function_names = {}
# Set of known unsupported functions of the backend
@@ -79,8 +79,8 @@ class BaseSpatialOperations:
# Constructors
from_text = False
- # Default conversion functions for aggregates; will be overridden if implemented
- # for the spatial backend.
+ # Default conversion functions for aggregates; will be overridden if
+ # implemented for the spatial backend.
def convert_extent(self, box, srid):
raise NotImplementedError(
"Aggregate extent not implemented for this spatial backend."
diff --git a/django/contrib/gis/db/backends/mysql/features.py b/django/contrib/gis/db/backends/mysql/features.py
index cd99420374..4e46ba40f3 100644
--- a/django/contrib/gis/db/backends/mysql/features.py
+++ b/django/contrib/gis/db/backends/mysql/features.py
@@ -17,5 +17,6 @@ class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
@cached_property
def supports_geometry_field_unique_index(self):
- # Not supported in MySQL since https://dev.mysql.com/worklog/task/?id=11808
+ # Not supported in MySQL since
+ # https://dev.mysql.com/worklog/task/?id=11808
return self.connection.mysql_is_mariadb
diff --git a/django/contrib/gis/db/backends/mysql/introspection.py b/django/contrib/gis/db/backends/mysql/introspection.py
index 4d6aea78a2..65e7549efc 100644
--- a/django/contrib/gis/db/backends/mysql/introspection.py
+++ b/django/contrib/gis/db/backends/mysql/introspection.py
@@ -19,9 +19,9 @@ class MySQLIntrospection(DatabaseIntrospection):
# column.
for column, typ, null, key, default, extra in cursor.fetchall():
if column == description.name:
- # Using OGRGeomType to convert from OGC name to Django field.
- # MySQL does not support 3D or SRIDs, so the field params
- # are empty.
+ # Using OGRGeomType to convert from OGC name to Django
+ # field. MySQL does not support 3D or SRIDs, so the field
+ # params are empty.
field_type = OGRGeomType(typ).django
field_params = {}
break
diff --git a/django/contrib/gis/db/backends/oracle/introspection.py b/django/contrib/gis/db/backends/oracle/introspection.py
index 8e1a5e7a8c..baaf658b43 100644
--- a/django/contrib/gis/db/backends/oracle/introspection.py
+++ b/django/contrib/gis/db/backends/oracle/introspection.py
@@ -17,7 +17,8 @@ class OracleIntrospection(DatabaseIntrospection):
def get_geometry_type(self, table_name, description):
with self.connection.cursor() as cursor:
- # Querying USER_SDO_GEOM_METADATA to get the SRID and dimension information.
+ # Querying USER_SDO_GEOM_METADATA to get the SRID and dimension
+ # information.
try:
cursor.execute(
'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" '
@@ -31,8 +32,8 @@ class OracleIntrospection(DatabaseIntrospection):
'corresponding to "%s"."%s"' % (table_name, description.name)
) from exc
- # TODO: Research way to find a more specific geometry field type for
- # the column's contents.
+ # TODO: Research way to find a more specific geometry field type
+ # for the column's contents.
field_type = "GeometryField"
# Getting the field parameters.
diff --git a/django/contrib/gis/db/backends/oracle/operations.py b/django/contrib/gis/db/backends/oracle/operations.py
index 77cb8e034c..467fc530b3 100644
--- a/django/contrib/gis/db/backends/oracle/operations.py
+++ b/django/contrib/gis/db/backends/oracle/operations.py
@@ -227,8 +227,8 @@ class OracleOperations(BaseSpatialOperations, DatabaseOperations):
return OracleSpatialRefSys
def modify_insert_params(self, placeholder, params):
- """Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial
- backend due to #10888.
+ """Drop out insert parameters for NULL placeholder. Needed for Oracle
+ Spatial backend due to #10888.
"""
if placeholder == "NULL":
return []
diff --git a/django/contrib/gis/db/backends/postgis/introspection.py b/django/contrib/gis/db/backends/postgis/introspection.py
index 7c96c2cb04..e3b9957838 100644
--- a/django/contrib/gis/db/backends/postgis/introspection.py
+++ b/django/contrib/gis/db/backends/postgis/introspection.py
@@ -61,7 +61,8 @@ class PostGISIntrospection(DatabaseIntrospection):
# OGRGeomType does not require GDAL and makes it easy to convert
# from OGC geom type name to Django field.
field_type = OGRGeomType(field_type).django
- # Getting any GeometryField keyword arguments that are not the default.
+ # Getting any GeometryField keyword arguments that are not the
+ # default.
field_params = {}
if self.postgis_oid_lookup.get(description.type_code) == "geography":
field_params["geography"] = True
diff --git a/django/contrib/gis/db/backends/postgis/operations.py b/django/contrib/gis/db/backends/postgis/operations.py
index 303e039958..df3cc7c7ee 100644
--- a/django/contrib/gis/db/backends/postgis/operations.py
+++ b/django/contrib/gis/db/backends/postgis/operations.py
@@ -328,7 +328,8 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
def _get_postgis_func(self, func):
"""
- Helper routine for calling PostGIS functions and returning their result.
+ Helper routine for calling PostGIS functions and returning their
+ result.
"""
# Close out the connection. See #9437.
with self.connection.temporary_connection() as cursor:
@@ -340,7 +341,9 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
return self._get_postgis_func("postgis_geos_version")
def postgis_lib_version(self):
- "Return the version number of the PostGIS library used with PostgreSQL."
+ """
+ Return the version number of the PostGIS library used with PostgreSQL.
+ """
return self._get_postgis_func("postgis_lib_version")
def postgis_proj_version(self):
diff --git a/django/contrib/gis/db/backends/postgis/schema.py b/django/contrib/gis/db/backends/postgis/schema.py
index c74b574c4c..aef62198cb 100644
--- a/django/contrib/gis/db/backends/postgis/schema.py
+++ b/django/contrib/gis/db/backends/postgis/schema.py
@@ -100,8 +100,9 @@ class PostGISSchemaEditor(DatabaseSchemaEditor):
opclasses = None
fields = [field]
if field.geom_type == "RASTER":
- # For raster fields, wrap index creation SQL statement with ST_ConvexHull.
- # Indexes on raster columns are based on the convex hull of the raster.
+ # For raster fields, wrap index creation SQL statement with
+ # ST_ConvexHull. Indexes on raster columns are based on the convex
+ # hull of the raster.
expressions = Func(Col(None, field), template=self.rast_index_template)
fields = None
elif field.dim > 2 and not field.geography:
diff --git a/django/contrib/gis/db/backends/spatialite/introspection.py b/django/contrib/gis/db/backends/spatialite/introspection.py
index 8d0003fd53..f2f5146586 100644
--- a/django/contrib/gis/db/backends/spatialite/introspection.py
+++ b/django/contrib/gis/db/backends/spatialite/introspection.py
@@ -52,7 +52,8 @@ class SpatiaLiteIntrospection(DatabaseIntrospection):
ogr_type = ogr_type % 1000 + OGRGeomType.wkb25bit
field_type = OGRGeomType(ogr_type).django
- # Getting any GeometryField keyword arguments that are not the default.
+ # Getting any GeometryField keyword arguments that are not the
+ # default.
dim = row[0]
srid = row[1]
field_params = {}
diff --git a/django/contrib/gis/db/backends/spatialite/operations.py b/django/contrib/gis/db/backends/spatialite/operations.py
index 3509001426..e9321ee2a3 100644
--- a/django/contrib/gis/db/backends/spatialite/operations.py
+++ b/django/contrib/gis/db/backends/spatialite/operations.py
@@ -48,9 +48,11 @@ class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
"relate": SpatialiteNullCheckOperator(func="Relate"),
"coveredby": SpatialiteNullCheckOperator(func="CoveredBy"),
"covers": SpatialiteNullCheckOperator(func="Covers"),
- # Returns true if B's bounding box completely contains A's bounding box.
+ # Returns true if B's bounding box completely contains A's bounding
+ # box.
"contained": SpatialOperator(func="MbrWithin"),
- # Returns true if A's bounding box completely contains B's bounding box.
+ # Returns true if A's bounding box completely contains B's bounding
+ # box.
"bbcontains": SpatialOperator(func="MbrContains"),
# Returns true if A's bounding box overlaps B's bounding box.
"bboverlaps": SpatialOperator(func="MbrOverlaps"),
diff --git a/django/contrib/gis/db/backends/spatialite/schema.py b/django/contrib/gis/db/backends/spatialite/schema.py
index fb2c5690de..69b8da2501 100644
--- a/django/contrib/gis/db/backends/spatialite/schema.py
+++ b/django/contrib/gis/db/backends/spatialite/schema.py
@@ -91,7 +91,8 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
def delete_model(self, model, **kwargs):
from django.contrib.gis.db.models import GeometryField
- # Drop spatial metadata (dropping the table does not automatically remove them)
+ # Drop spatial metadata (dropping the table does not automatically
+ # remove them)
for field in model._meta.local_fields:
if isinstance(field, GeometryField):
self.remove_geometry_metadata(model, field)
@@ -126,9 +127,9 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
# NOTE: If the field is a geometry field, the table is just recreated,
# the parent's remove_field can't be used cause it will skip the
- # recreation if the field does not have a database type. Geometry fields
- # do not have a db type cause they are added and removed via stored
- # procedures.
+ # recreation if the field does not have a database type. Geometry
+ # fields do not have a db type cause they are added and removed via
+ # stored procedures.
if isinstance(field, GeometryField):
self._remake_table(model, delete_field=field)
else:
diff --git a/django/contrib/gis/db/backends/utils.py b/django/contrib/gis/db/backends/utils.py
index ffb7420019..4e2035d577 100644
--- a/django/contrib/gis/db/backends/utils.py
+++ b/django/contrib/gis/db/backends/utils.py
@@ -6,7 +6,8 @@ backends.
class SpatialOperator:
"""
- Class encapsulating the behavior specific to a GIS operation (used by lookups).
+ Class encapsulating the behavior specific to a GIS operation (used by
+ lookups).
"""
sql_template = None
diff --git a/django/contrib/gis/db/models/fields.py b/django/contrib/gis/db/models/fields.py
index 4542e19040..d1c1a5937e 100644
--- a/django/contrib/gis/db/models/fields.py
+++ b/django/contrib/gis/db/models/fields.py
@@ -81,8 +81,8 @@ class BaseSpatialField(Field):
def __init__(self, verbose_name=None, srid=4326, spatial_index=True, **kwargs):
"""
- The initialization function for base spatial fields. Takes the following
- as keyword arguments:
+ The initialization function for base spatial fields. Takes the
+ following as keyword arguments:
srid:
The spatial reference system identifier, an OGC standard.
@@ -195,7 +195,8 @@ class BaseSpatialField(Field):
if isinstance(obj, GEOSGeometry):
pass
else:
- # Check if input is a candidate for conversion to raster or geometry.
+ # Check if input is a candidate for conversion to raster or
+ # geometry.
is_candidate = isinstance(obj, (bytes, str)) or hasattr(
obj, "__geo_interface__"
)
@@ -395,7 +396,8 @@ class RasterField(BaseSpatialField):
geography = False
def _check_connection(self, connection):
- # Make sure raster fields are used only on backends with raster support.
+ # Make sure raster fields are used only on backends with raster
+ # support.
if (
not connection.features.gis_enabled
or not connection.features.supports_raster
diff --git a/django/contrib/gis/db/models/functions.py b/django/contrib/gis/db/models/functions.py
index cafcd32e3b..9e94d0f77a 100644
--- a/django/contrib/gis/db/models/functions.py
+++ b/django/contrib/gis/db/models/functions.py
@@ -121,8 +121,8 @@ class GeomOutputGeoFunc(GeoFunc):
class SQLiteDecimalToFloatMixin:
"""
- By default, Decimal values are converted to str by the SQLite backend, which
- is not acceptable by the GIS functions expecting numeric values.
+ By default, Decimal values are converted to str by the SQLite backend,
+ which is not acceptable by the GIS functions expecting numeric values.
"""
def as_sqlite(self, compiler, connection, **extra_context):
@@ -483,7 +483,8 @@ class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
if self.source_is_geography():
clone.source_expressions.append(Value(self.spheroid))
elif self.geo_field.geodetic(connection):
- # Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
+ # Geometry fields with geodetic (lon/lat) coordinates need
+ # length_spheroid
function = connection.ops.spatial_function_name("LengthSpheroid")
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
diff --git a/django/contrib/gis/db/models/lookups.py b/django/contrib/gis/db/models/lookups.py
index 49e6c8b606..3d30ffed5c 100644
--- a/django/contrib/gis/db/models/lookups.py
+++ b/django/contrib/gis/db/models/lookups.py
@@ -70,9 +70,9 @@ class GISLookup(Lookup):
return placeholder % rhs, rhs_params
def get_rhs_op(self, connection, rhs):
- # Unlike BuiltinLookup, the GIS get_rhs_op() implementation should return
- # an object (SpatialOperator) with an as_sql() method to allow for more
- # complex computations (where the lhs part can be mixed in).
+ # Unlike BuiltinLookup, the GIS get_rhs_op() implementation should
+ # return an object (SpatialOperator) with an as_sql() method to allow
+ # for more complex computations (where the lhs part can be mixed in).
return connection.ops.gis_operators[self.lookup_name]
def as_sql(self, compiler, connection):
@@ -98,8 +98,8 @@ class GISLookup(Lookup):
@BaseSpatialField.register_lookup
class OverlapsLeftLookup(GISLookup):
"""
- The overlaps_left operator returns true if A's bounding box overlaps or is to the
- left of B's bounding box.
+ The overlaps_left operator returns true if A's bounding box overlaps or is
+ to the left of B's bounding box.
"""
lookup_name = "overlaps_left"
@@ -108,8 +108,8 @@ class OverlapsLeftLookup(GISLookup):
@BaseSpatialField.register_lookup
class OverlapsRightLookup(GISLookup):
"""
- The 'overlaps_right' operator returns true if A's bounding box overlaps or is to the
- right of B's bounding box.
+ The 'overlaps_right' operator returns true if A's bounding box overlaps or
+ is to the right of B's bounding box.
"""
lookup_name = "overlaps_right"
@@ -118,8 +118,8 @@ class OverlapsRightLookup(GISLookup):
@BaseSpatialField.register_lookup
class OverlapsBelowLookup(GISLookup):
"""
- The 'overlaps_below' operator returns true if A's bounding box overlaps or is below
- B's bounding box.
+ The 'overlaps_below' operator returns true if A's bounding box overlaps or
+ is below B's bounding box.
"""
lookup_name = "overlaps_below"
@@ -128,8 +128,8 @@ class OverlapsBelowLookup(GISLookup):
@BaseSpatialField.register_lookup
class OverlapsAboveLookup(GISLookup):
"""
- The 'overlaps_above' operator returns true if A's bounding box overlaps or is above
- B's bounding box.
+ The 'overlaps_above' operator returns true if A's bounding box overlaps or
+ is above B's bounding box.
"""
lookup_name = "overlaps_above"
@@ -138,8 +138,8 @@ class OverlapsAboveLookup(GISLookup):
@BaseSpatialField.register_lookup
class LeftLookup(GISLookup):
"""
- The 'left' operator returns true if A's bounding box is strictly to the left
- of B's bounding box.
+ The 'left' operator returns true if A's bounding box is strictly to the
+ left of B's bounding box.
"""
lookup_name = "left"
@@ -148,8 +148,8 @@ class LeftLookup(GISLookup):
@BaseSpatialField.register_lookup
class RightLookup(GISLookup):
"""
- The 'right' operator returns true if A's bounding box is strictly to the right
- of B's bounding box.
+ The 'right' operator returns true if A's bounding box is strictly to the
+ right of B's bounding box.
"""
lookup_name = "right"
@@ -158,8 +158,8 @@ class RightLookup(GISLookup):
@BaseSpatialField.register_lookup
class StrictlyBelowLookup(GISLookup):
"""
- The 'strictly_below' operator returns true if A's bounding box is strictly below B's
- bounding box.
+ The 'strictly_below' operator returns true if A's bounding box is strictly
+ below B's bounding box.
"""
lookup_name = "strictly_below"
@@ -168,8 +168,8 @@ class StrictlyBelowLookup(GISLookup):
@BaseSpatialField.register_lookup
class StrictlyAboveLookup(GISLookup):
"""
- The 'strictly_above' operator returns true if A's bounding box is strictly above B's
- bounding box.
+ The 'strictly_above' operator returns true if A's bounding box is strictly
+ above B's bounding box.
"""
lookup_name = "strictly_above"
@@ -192,8 +192,8 @@ BaseSpatialField.register_lookup(SameAsLookup, "exact")
@BaseSpatialField.register_lookup
class BBContainsLookup(GISLookup):
"""
- The 'bbcontains' operator returns true if A's bounding box completely contains
- by B's bounding box.
+ The 'bbcontains' operator returns true if A's bounding box completely
+    contains B's bounding box.
"""
lookup_name = "bbcontains"
@@ -212,8 +212,8 @@ class BBOverlapsLookup(GISLookup):
@BaseSpatialField.register_lookup
class ContainedLookup(GISLookup):
"""
- The 'contained' operator returns true if A's bounding box is completely contained
- by B's bounding box.
+ The 'contained' operator returns true if A's bounding box is completely
+ contained by B's bounding box.
"""
lookup_name = "contained"
diff --git a/django/contrib/gis/gdal/envelope.py b/django/contrib/gis/gdal/envelope.py
index 8293aa499d..f33f9b4df6 100644
--- a/django/contrib/gis/gdal/envelope.py
+++ b/django/contrib/gis/gdal/envelope.py
@@ -39,8 +39,8 @@ class Envelope:
def __init__(self, *args):
"""
- The initialization function may take an OGREnvelope structure, 4-element
- tuple or list, or 4 individual arguments.
+ The initialization function may take an OGREnvelope structure,
+ 4-element tuple or list, or 4 individual arguments.
"""
if len(args) == 1:
diff --git a/django/contrib/gis/gdal/geometries.py b/django/contrib/gis/gdal/geometries.py
index f0e56a3e01..6301cd7146 100644
--- a/django/contrib/gis/gdal/geometries.py
+++ b/django/contrib/gis/gdal/geometries.py
@@ -30,7 +30,8 @@ Example:
>>> print(mpnt.proj)
+proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs
>>> print(mpnt)
- MULTIPOINT (-89.99993037860248 29.99979788655764,-89.99993037860248 29.99979788655764)
+ MULTIPOINT (-89.99993037860248 29.99979788655764,-89.99993037860248
+ 29.99979788655764)
The OGRGeomType class is to make it easy to specify an OGR geometry type:
>>> from django.contrib.gis.gdal import OGRGeomType
@@ -248,7 +249,10 @@ class OGRGeometry(GDALBase):
@property
def area(self):
- "Return the area for a LinearRing, Polygon, or MultiPolygon; 0 otherwise."
+ """
+ Return the area for a LinearRing, Polygon, or MultiPolygon; 0
+ otherwise.
+ """
return capi.get_area(self.ptr)
@property
@@ -411,7 +415,8 @@ class OGRGeometry(GDALBase):
else:
byteorder = 0 # wkbXDR
sz = self.wkb_size
- # Creating the unsigned character buffer, and passing it in by reference.
+ # Creating the unsigned character buffer, and passing it in by
+ # reference.
buf = (c_ubyte * sz)()
# For backward compatibility, export old-style 99-402 extended
# dimension types when geometry does not have an M dimension.
@@ -483,8 +488,8 @@ class OGRGeometry(GDALBase):
# #### Topology Methods ####
def _topology(self, func, other):
- """A generalized function for topology operations, takes a GDAL function and
- the other geometry to perform the operation on."""
+ """A generalized function for topology operations, takes a GDAL
+ function and the other geometry to perform the operation on."""
if not isinstance(other, OGRGeometry):
raise TypeError(
"Must use another OGRGeometry object for topology operations!"
diff --git a/django/contrib/gis/gdal/raster/band.py b/django/contrib/gis/gdal/raster/band.py
index c3ec960643..34ce39633e 100644
--- a/django/contrib/gis/gdal/raster/band.py
+++ b/django/contrib/gis/gdal/raster/band.py
@@ -71,8 +71,8 @@ class GDALBand(GDALRasterBase):
If approximate=True, the statistics may be computed based on overviews
or a subset of image tiles.
- If refresh=True, the statistics will be computed from the data directly,
- and the cache will be updated where applicable.
+ If refresh=True, the statistics will be computed from the data
+ directly, and the cache will be updated where applicable.
For empty bands (where all pixel values are nodata), all statistics
values are returned as None.
diff --git a/django/contrib/gis/gdal/raster/source.py b/django/contrib/gis/gdal/raster/source.py
index b33eb11c0f..93c5900970 100644
--- a/django/contrib/gis/gdal/raster/source.py
+++ b/django/contrib/gis/gdal/raster/source.py
@@ -204,7 +204,8 @@ class GDALRaster(GDALRasterBase):
if "skew" in ds_input:
self.skew.x, self.skew.y = ds_input["skew"]
elif isinstance(ds_input, c_void_p):
- # Instantiate the object using an existing pointer to a gdal raster.
+ # Instantiate the object using an existing pointer to a gdal
+ # raster.
self._ptr = ds_input
else:
raise GDALException(
@@ -410,11 +411,12 @@ class GDALRaster(GDALRasterBase):
name of the source raster will be used and appended with
_copy. + source_driver_name.
- In addition, the resampling algorithm can be specified with the "resampling"
- input parameter. The default is NearestNeighbor. For a list of all options
- consult the GDAL_RESAMPLE_ALGORITHMS constant.
+ In addition, the resampling algorithm can be specified with the
+ "resampling" input parameter. The default is NearestNeighbor. For a
+ list of all options consult the GDAL_RESAMPLE_ALGORITHMS constant.
"""
- # Get the parameters defining the geotransform, srid, and size of the raster
+ # Get the parameters defining the geotransform, srid, and size of the
+ # raster
ds_input.setdefault("width", self.width)
ds_input.setdefault("height", self.height)
ds_input.setdefault("srid", self.srs.srid)
diff --git a/django/contrib/gis/gdal/srs.py b/django/contrib/gis/gdal/srs.py
index bb3176c383..cd77a1bf6c 100644
--- a/django/contrib/gis/gdal/srs.py
+++ b/django/contrib/gis/gdal/srs.py
@@ -44,9 +44,9 @@ class AxisOrder(IntEnum):
class SpatialReference(GDALBase):
"""
- A wrapper for the OGRSpatialReference object. According to the GDAL web site,
- the SpatialReference object "provide[s] services to represent coordinate
- systems (projections and datums) and to transform between them."
+ A wrapper for the OGRSpatialReference object. According to the GDAL web
+ site, the SpatialReference object "provide[s] services to represent
+ coordinate systems (projections and datums) and to transform between them."
"""
destructor = capi.release_srs
@@ -150,8 +150,8 @@ class SpatialReference(GDALBase):
# #### SpatialReference Methods ####
def attr_value(self, target, index=0):
"""
- The attribute value for the given target node (e.g. 'PROJCS'). The index
- keyword specifies an index of the child node to return.
+ The attribute value for the given target node (e.g. 'PROJCS'). The
+ index keyword specifies an index of the child node to return.
"""
if not isinstance(target, str) or not isinstance(index, int):
raise TypeError
@@ -288,7 +288,9 @@ class SpatialReference(GDALBase):
@property
def local(self):
- "Return True if this SpatialReference is local (root node is LOCAL_CS)."
+ """
+ Return True if this SpatialReference is local (root node is LOCAL_CS).
+ """
return bool(capi.islocal(self.ptr))
@property
diff --git a/django/contrib/gis/geos/collections.py b/django/contrib/gis/geos/collections.py
index 41a647f234..8659b660b6 100644
--- a/django/contrib/gis/geos/collections.py
+++ b/django/contrib/gis/geos/collections.py
@@ -27,8 +27,8 @@ class GeometryCollection(GEOSGeometry):
else:
init_geoms = args
- # Ensuring that only the permitted geometries are allowed in this collection
- # this is moved to list mixin super class
+ # Ensuring that only the permitted geometries are allowed in this
+        # collection; this is moved to list mixin super class
self._check_allowed(init_geoms)
# Creating the geometry pointer array.
@@ -61,14 +61,19 @@ class GeometryCollection(GEOSGeometry):
return capi.get_geomn(self.ptr, index)
def _get_single_external(self, index):
- "Return the Geometry from this Collection at the given index (0-based)."
+ """
+ Return the Geometry from this Collection at the given index (0-based).
+ """
# Checking the index and returning the corresponding GEOS geometry.
return GEOSGeometry(
capi.geom_clone(self._get_single_internal(index)), srid=self.srid
)
def _set_list(self, length, items):
- "Create a new collection, and destroy the contents of the previous pointer."
+ """
+ Create a new collection, and destroy the contents of the previous
+ pointer.
+ """
prev_ptr = self.ptr
srid = self.srid
self.ptr = self._create_collection(length, items)
diff --git a/django/contrib/gis/geos/coordseq.py b/django/contrib/gis/geos/coordseq.py
index a9ec4d2bf0..e54f3f2714 100644
--- a/django/contrib/gis/geos/coordseq.py
+++ b/django/contrib/gis/geos/coordseq.py
@@ -180,7 +180,7 @@ class GEOSCoordSeq(GEOSBase):
    @property
    def kml(self):
        "Return the KML representation for the coordinates."
-        # Getting the substitution string depending on whether the coordinates have
-        # a Z dimension.
+        # Getting the substitution string depending on whether the coordinates
+        # have a Z dimension.
if self.hasz:
substr = "%s,%s,%s "
diff --git a/django/contrib/gis/geos/geometry.py b/django/contrib/gis/geos/geometry.py
index cfc2d695ea..48658c4218 100644
--- a/django/contrib/gis/geos/geometry.py
+++ b/django/contrib/gis/geos/geometry.py
@@ -76,9 +76,10 @@ class GEOSGeometryBase(GEOSBase):
def __deepcopy__(self, memodict):
"""
- The `deepcopy` routine is used by the `Node` class of django.utils.tree;
- thus, the protocol routine needs to be implemented to return correct
- copies (clones) of these GEOS objects, which use C pointers.
+ The `deepcopy` routine is used by the `Node` class of
+ django.utils.tree; thus, the protocol routine needs to be implemented
+ to return correct copies (clones) of these GEOS objects, which use C
+ pointers.
"""
return self.clone()
@@ -343,7 +344,8 @@ class GEOSGeometryBase(GEOSBase):
def overlaps(self, other):
"""
Return true if the DE-9IM intersection matrix for the two Geometries
- is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves).
+ is T*T***T** (for two points or two surfaces) 1*T***T** (for two
+ curves).
"""
return capi.geos_overlaps(self.ptr, other.ptr)
@@ -542,9 +544,9 @@ class GEOSGeometryBase(GEOSBase):
"""
Return a geometry that represents all points whose distance from this
Geometry is less than or equal to distance. Calculations are in the
- Spatial Reference System of this Geometry. The optional third parameter sets
- the number of segment used to approximate a quarter circle (defaults to 8).
- (Text from PostGIS documentation at ch. 6.1.3)
+ Spatial Reference System of this Geometry. The optional third parameter
+        sets the number of segments used to approximate a quarter circle
+ (defaults to 8). (Text from PostGIS documentation at ch. 6.1.3)
"""
return self._topology(capi.geos_buffer(self.ptr, width, quadsegs))
@@ -567,9 +569,9 @@ class GEOSGeometryBase(GEOSBase):
@property
def centroid(self):
"""
- The centroid is equal to the centroid of the set of component Geometries
- of highest dimension (since the lower-dimension geometries contribute zero
- "weight" to the centroid).
+ The centroid is equal to the centroid of the set of component
+ Geometries of highest dimension (since the lower-dimension geometries
+ contribute zero "weight" to the centroid).
"""
return self._topology(capi.geos_centroid(self.ptr))
@@ -594,7 +596,10 @@ class GEOSGeometryBase(GEOSBase):
return self._topology(capi.geos_envelope(self.ptr))
def intersection(self, other):
- "Return a Geometry representing the points shared by this Geometry and other."
+ """
+ Return a Geometry representing the points shared by this Geometry and
+ other.
+ """
return self._topology(capi.geos_intersection(self.ptr, other.ptr))
@property
@@ -603,7 +608,9 @@ class GEOSGeometryBase(GEOSBase):
return self._topology(capi.geos_pointonsurface(self.ptr))
def relate(self, other):
- "Return the DE-9IM intersection matrix for this Geometry and the other."
+ """
+ Return the DE-9IM intersection matrix for this Geometry and the other.
+ """
return capi.geos_relate(self.ptr, other.ptr).decode()
def simplify(self, tolerance=0.0, preserve_topology=False):
@@ -636,7 +643,10 @@ class GEOSGeometryBase(GEOSBase):
return self._topology(capi.geos_unary_union(self.ptr))
def union(self, other):
- "Return a Geometry representing all the points in this Geometry and other."
+ """
+ Return a Geometry representing all the points in this Geometry and
+ other.
+ """
return self._topology(capi.geos_union(self.ptr, other.ptr))
# #### Other Routines ####
diff --git a/django/contrib/gis/geos/libgeos.py b/django/contrib/gis/geos/libgeos.py
index 501c28c6d3..99a4f2f94e 100644
--- a/django/contrib/gis/geos/libgeos.py
+++ b/django/contrib/gis/geos/libgeos.py
@@ -57,7 +57,8 @@ def load_geos():
)
# Getting the GEOS C library. The C interface (CDLL) is used for
# both *NIX and Windows.
- # See the GEOS C API source code for more details on the library function calls:
+ # See the GEOS C API source code for more details on the library function
+ # calls:
# https://libgeos.org/doxygen/geos__c_8h_source.html
_lgeos = CDLL(lib_path)
# Here we set up the prototypes for the initGEOS_r and finishGEOS_r
diff --git a/django/contrib/gis/geos/point.py b/django/contrib/gis/geos/point.py
index a1a0a33e00..90f0793e16 100644
--- a/django/contrib/gis/geos/point.py
+++ b/django/contrib/gis/geos/point.py
@@ -26,7 +26,8 @@ class Point(GEOSGeometry):
# Here a tuple or list was passed in under the `x` parameter.
coords = x
elif isinstance(x, (float, int)) and isinstance(y, (float, int)):
- # Here X, Y, and (optionally) Z were passed in individually, as parameters.
+ # Here X, Y, and (optionally) Z were passed in individually, as
+ # parameters.
if isinstance(z, (float, int)):
coords = [x, y, z]
else:
diff --git a/django/contrib/gis/geos/polygon.py b/django/contrib/gis/geos/polygon.py
index 554447c73f..c6b96607a9 100644
--- a/django/contrib/gis/geos/polygon.py
+++ b/django/contrib/gis/geos/polygon.py
@@ -69,8 +69,8 @@ class Polygon(GEOSGeometry):
# ### These routines are needed for list-like operation w/ListMixin ###
def _create_polygon(self, length, items):
# Instantiate LinearRing objects if necessary, but don't clone them yet
- # _construct_ring will throw a TypeError if a parameter isn't a valid ring
- # If we cloned the pointers here, we wouldn't be able to clean up
+ # _construct_ring will throw a TypeError if a parameter isn't a valid
+        # ring. If we cloned the pointers here, we wouldn't be able to clean up
# in case of error.
if not length:
return capi.create_empty_polygon()
diff --git a/django/contrib/gis/geos/prototypes/errcheck.py b/django/contrib/gis/geos/prototypes/errcheck.py
index 5ee43999fa..044bf8bc5c 100644
--- a/django/contrib/gis/geos/prototypes/errcheck.py
+++ b/django/contrib/gis/geos/prototypes/errcheck.py
@@ -19,7 +19,9 @@ def last_arg_byref(args):
def check_dbl(result, func, cargs):
- "Check the status code and returns the double value passed in by reference."
+ """
+    Check the status code and return the double value passed in by reference.
+ """
# Checking the status code
if result != 1:
return None
diff --git a/django/contrib/gis/measure.py b/django/contrib/gis/measure.py
index 707c061a29..71dc130ac4 100644
--- a/django/contrib/gis/measure.py
+++ b/django/contrib/gis/measure.py
@@ -1,8 +1,8 @@
# Copyright (c) 2007, Robert Coup
# All rights reserved.
#
-# Redistribution and use in source and binary forms, with or without modification,
-# are permitted provided that the following conditions are met:
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
@@ -11,20 +11,21 @@
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
-# 3. Neither the name of Distance nor the names of its contributors may be used
-# to endorse or promote products derived from this software without
+# 3. Neither the name of Distance nor the names of its contributors may be
+# used to endorse or promote products derived from this software without
# specific prior written permission.
#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
#
"""
Distance and Area objects to allow for sensible and convenient calculation
diff --git a/django/contrib/gis/utils/layermapping.py b/django/contrib/gis/utils/layermapping.py
index e2bf30200e..a4cd04dc05 100644
--- a/django/contrib/gis/utils/layermapping.py
+++ b/django/contrib/gis/utils/layermapping.py
@@ -107,10 +107,10 @@ class LayerMapping:
using=None,
):
"""
- A LayerMapping object is initialized using the given Model (not an instance),
- a DataSource (or string path to an OGR-supported data file), and a mapping
- dictionary. See the module level docstring for more details and keyword
- argument usage.
+ A LayerMapping object is initialized using the given Model (not an
+ instance), a DataSource (or string path to an OGR-supported data file),
+ and a mapping dictionary. See the module level docstring for more
+ details and keyword argument usage.
"""
# Getting the DataSource and the associated Layer.
if isinstance(data, (str, Path)):
@@ -227,7 +227,8 @@ class LayerMapping:
'Given mapping field "%s" not in given Model fields.' % field_name
)
- # Getting the string name for the Django field class (e.g., 'PointField').
+ # Getting the string name for the Django field class (e.g.,
+ # 'PointField').
fld_name = model_field.__class__.__name__
if isinstance(model_field, GeometryField):
@@ -262,9 +263,9 @@ class LayerMapping:
% (fld_name, "(dim=3)" if coord_dim == 3 else "", ltype)
)
- # Setting the `geom_field` attribute w/the name of the model field
- # that is a Geometry. Also setting the coordinate dimension
- # attribute.
+ # Setting the `geom_field` attribute w/the name of the model
+ # field that is a Geometry. Also setting the coordinate
+ # dimension attribute.
self.geom_field = field_name
self.coord_dim = coord_dim
fields_val = model_field
@@ -424,7 +425,8 @@ class LayerMapping:
digits = dtup[1]
d_idx = dtup[2] # index where the decimal is
- # Maximum amount of precision, or digits to the left of the decimal.
+ # Maximum amount of precision, or digits to the left of the
+ # decimal.
max_prec = model_field.max_digits - model_field.decimal_places
# Getting the digits to the left of the decimal place for the
@@ -446,7 +448,8 @@ class LayerMapping:
elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(
model_field, models.IntegerField
):
- # Attempt to convert any OFTReal and OFTString value to an OFTInteger.
+ # Attempt to convert any OFTReal and OFTString value to an
+ # OFTInteger.
try:
val = int(ogr_field.value)
except ValueError:
@@ -533,7 +536,10 @@ class LayerMapping:
) from exc
def geometry_field(self):
- "Return the GeometryField instance associated with the geographic column."
+ """
+ Return the GeometryField instance associated with the geographic
+ column.
+ """
# Use `get_field()` on the model's options so that we
# get the correct field instance if there's model inheritance.
opts = self.model._meta
@@ -542,7 +548,8 @@ class LayerMapping:
def make_multi(self, geom_type, model_field):
"""
Given the OGRGeomType for a geometry and its associated GeometryField,
- determine whether the geometry should be turned into a GeometryCollection.
+ determine whether the geometry should be turned into a
+ GeometryCollection.
"""
return (
geom_type.num in self.MULTI_TYPES
@@ -583,12 +590,13 @@ class LayerMapping:
When this keyword is set, status information will be printed giving
the number of features processed and successfully saved. By default,
progress information will be printed every 1000 features processed,
- however, this default may be overridden by setting this keyword with an
- integer for the desired interval.
+ however, this default may be overridden by setting this keyword with
+ an integer for the desired interval.
stream:
Status information will be written to this file handle. Defaults to
- using `sys.stdout`, but any object with a `write` method is supported.
+ using `sys.stdout`, but any object with a `write` method is
+ supported.
silent:
By default, non-fatal error notifications are printed to stdout, but
@@ -631,8 +639,8 @@ class LayerMapping:
# Constructing the model using the keyword args
is_update = False
if self.unique:
- # If we want unique models on a particular field, handle the
- # geometry appropriately.
+ # If we want unique models on a particular field,
+ # handle the geometry appropriately.
try:
# Getting the keyword arguments and retrieving
# the unique model.
@@ -688,8 +696,8 @@ class LayerMapping:
"Processed %d features, saved %d ...\n" % (num_feat, num_saved)
)
- # Only used for status output purposes -- incremental saving uses the
- # values returned here.
+ # Only used for status output purposes -- incremental saving uses
+ # the values returned here.
return num_saved, num_feat
if self.transaction_decorator is not None:
@@ -708,8 +716,8 @@ class LayerMapping:
n_i = len(indices)
for i, end in enumerate(indices):
- # Constructing the slice to use for this step; the last slice is
- # special (e.g, [100:] instead of [90:100]).
+ # Constructing the slice to use for this step; the last slice
+                # is special (e.g., [100:] instead of [90:100]).
if i + 1 == n_i:
step_slice = slice(beg, None)
else:
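
For context, the rewrapped docstrings above belong to GeoDjango's LayerMapping.
A minimal usage sketch, assuming a hypothetical WorldBorder model and shapefile;
mapping keys are model field names, values are OGR field names, and the
geometry field maps to an OGR geometry type:

    from django.contrib.gis.utils import LayerMapping
    from myapp.models import WorldBorder  # hypothetical model with name/geom fields

    mapping = {"name": "NAME", "geom": "POLYGON"}
    lm = LayerMapping(WorldBorder, "world_borders.shp", mapping)
    lm.save(strict=True, verbose=True)
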
diff --git a/django/contrib/gis/utils/ogrinspect.py b/django/contrib/gis/utils/ogrinspect.py
index 63353690d9..96f58c9ff1 100644
--- a/django/contrib/gis/utils/ogrinspect.py
+++ b/django/contrib/gis/utils/ogrinspect.py
@@ -141,8 +141,9 @@ def _ogrinspect(
null=False,
):
"""
- Helper routine for `ogrinspect` that generates GeoDjango models corresponding
- to the given data source. See the `ogrinspect` docstring for more details.
+ Helper routine for `ogrinspect` that generates GeoDjango models
+ corresponding to the given data source. See the `ogrinspect` docstring for
+ more details.
"""
# Getting the DataSource
if isinstance(data_source, str):
diff --git a/django/contrib/humanize/templatetags/humanize.py b/django/contrib/humanize/templatetags/humanize.py
index 7e2e3b5fed..91ffe3acad 100644
--- a/django/contrib/humanize/templatetags/humanize.py
+++ b/django/contrib/humanize/templatetags/humanize.py
@@ -39,11 +39,14 @@ def ordinal(value):
templates = (
# Translators: Ordinal format when value ends with 0, e.g. 80th.
pgettext("ordinal 0", "{}th"),
- # Translators: Ordinal format when value ends with 1, e.g. 81st, except 11.
+ # Translators: Ordinal format when value ends with 1, e.g. 81st,
+ # except 11.
pgettext("ordinal 1", "{}st"),
- # Translators: Ordinal format when value ends with 2, e.g. 82nd, except 12.
+ # Translators: Ordinal format when value ends with 2, e.g. 82nd,
+ # except 12.
pgettext("ordinal 2", "{}nd"),
- # Translators: Ordinal format when value ends with 3, e.g. 83rd, except 13.
+ # Translators: Ordinal format when value ends with 3, e.g. 83rd,
+ # except 13.
pgettext("ordinal 3", "{}rd"),
# Translators: Ordinal format when value ends with 4, e.g. 84th.
pgettext("ordinal 4", "{}th"),
@@ -212,7 +215,8 @@ def naturaltime(value):
class NaturalTimeFormatter:
time_strings = {
- # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
+ # Translators: delta will contain a string like '2 months' or '1 month,
+ # 2 weeks'
"past-day": gettext_lazy("%(delta)s ago"),
# Translators: please keep a non-breaking space (U+00A0) between count
# and time unit.
@@ -225,25 +229,27 @@ class NaturalTimeFormatter:
"past-second": ngettext_lazy("a second ago", "%(count)s seconds ago", "count"),
"now": gettext_lazy("now"),
# fmt: off
- # fmt turned off to avoid black splitting the ngettext_lazy calls to multiple
- # lines, as this results in gettext missing the 'Translators:' comments.
+ # fmt turned off to avoid black splitting the ngettext_lazy calls to
+ # multiple lines, as this results in gettext missing the 'Translators:'
+ # comments.
"future-second": ngettext_lazy(
- # Translators: please keep a non-breaking space (U+00A0) between count
- # and time unit.
+ # Translators: please keep a non-breaking space (U+00A0) between
+ # count and time unit.
"a second from now", "%(count)s seconds from now", "count"
),
"future-minute": ngettext_lazy(
- # Translators: please keep a non-breaking space (U+00A0) between count
- # and time unit.
+ # Translators: please keep a non-breaking space (U+00A0) between
+ # count and time unit.
"a minute from now", "%(count)s minutes from now", "count",
),
"future-hour": ngettext_lazy(
- # Translators: please keep a non-breaking space (U+00A0) between count
- # and time unit.
+ # Translators: please keep a non-breaking space (U+00A0) between
+ # count and time unit.
"an hour from now", "%(count)s hours from now", "count",
),
# fmt: on
- # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
+ # Translators: delta will contain a string like '2 months' or '1 month,
+ # 2 weeks'
"future-day": gettext_lazy("%(delta)s from now"),
}
past_substrings = {
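
The translator templates above encode the English ordinal rule: 11, 12, and 13
take "th" even though they end in 1, 2, and 3. A minimal sketch of that rule,
separate from the template tag itself:

    def ordinal_suffix(value):
        # 11-13 are special-cased; otherwise the last digit decides.
        if value % 100 in (11, 12, 13):
            return "th"
        return {1: "st", 2: "nd", 3: "rd"}.get(value % 10, "th")

    assert ordinal_suffix(81) == "st" and ordinal_suffix(11) == "th"
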
diff --git a/django/contrib/postgres/signals.py b/django/contrib/postgres/signals.py
index a3816d3d30..00ab421e04 100644
--- a/django/contrib/postgres/signals.py
+++ b/django/contrib/postgres/signals.py
@@ -58,12 +58,12 @@ else:
oids, array_oids = get_hstore_oids(connection.alias)
# Don't register handlers when hstore is not available on the database.
#
- # If someone tries to create an hstore field it will error there. This is
- # necessary as someone may be using PSQL without extensions installed but
- # be using other features of contrib.postgres.
+ # If someone tries to create an hstore field it will error there. This
+ # is necessary as someone may be using PSQL without extensions
+ # installed but be using other features of contrib.postgres.
#
- # This is also needed in order to create the connection in order to install
- # the hstore extension.
+ # This is also needed in order to create the connection in order to
+ # install the hstore extension.
if oids:
register_hstore(
connection.connection, globally=True, oid=oids, array_oid=array_oids
@@ -72,7 +72,8 @@ else:
oids, citext_oids = get_citext_oids(connection.alias)
# Don't register handlers when citext is not available on the database.
#
- # The same comments in the above call to register_hstore() also apply here.
+ # The same comments in the above call to register_hstore() also apply
+ # here.
if oids:
array_type = psycopg2.extensions.new_array_type(
citext_oids, "citext[]", psycopg2.STRING
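
The comments above assume hstore (or citext) may simply be missing from the
database. If the extension is wanted so these handlers can find its OIDs, the
documented route is a migration operation; a minimal sketch with placeholder
dependencies:

    from django.contrib.postgres.operations import HStoreExtension
    from django.db import migrations

    class Migration(migrations.Migration):
        dependencies = []  # placeholder
        operations = [HStoreExtension()]
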
diff --git a/django/contrib/sessions/backends/base.py b/django/contrib/sessions/backends/base.py
index 69f756a228..e53f1d201a 100644
--- a/django/contrib/sessions/backends/base.py
+++ b/django/contrib/sessions/backends/base.py
@@ -120,7 +120,9 @@ class SessionBase:
del (await self._aget_session())[self.TEST_COOKIE_NAME]
def encode(self, session_dict):
- "Return the given session dictionary serialized and encoded as a string."
+ """
+ Return the given session dictionary serialized and encoded as a string.
+ """
return signing.dumps(
session_dict,
salt=self.key_salt,
diff --git a/django/contrib/sitemaps/__init__.py b/django/contrib/sitemaps/__init__.py
index dd21d8829d..c0c0ac77a7 100644
--- a/django/contrib/sitemaps/__init__.py
+++ b/django/contrib/sitemaps/__init__.py
@@ -51,7 +51,8 @@ class Sitemap:
def _items(self):
if self.i18n:
# Create (item, lang_code) tuples for all items and languages.
- # This is necessary to paginate with all languages already considered.
+ # This is necessary to paginate with all languages already
+ # considered.
items = [
(item, lang_code)
for item in self.items()
@@ -63,7 +64,8 @@ class Sitemap:
def _location(self, item, force_lang_code=None):
if self.i18n:
obj, lang_code = item
- # Activate language from item-tuple or forced one before calling location.
+ # Activate language from item-tuple or forced one before calling
+ # location.
with translation.override(force_lang_code or lang_code):
return self._get("location", item)
return self._get("location", item)
diff --git a/django/contrib/staticfiles/storage.py b/django/contrib/staticfiles/storage.py
index 2ec107d55a..b16c77757c 100644
--- a/django/contrib/staticfiles/storage.py
+++ b/django/contrib/staticfiles/storage.py
@@ -239,15 +239,18 @@ class HashedFilesMixin:
return matched
if url_path.startswith("/"):
- # Otherwise the condition above would have returned prematurely.
+ # Otherwise the condition above would have returned
+ # prematurely.
assert url_path.startswith(settings.STATIC_URL)
target_name = url_path.removeprefix(settings.STATIC_URL)
else:
- # We're using the posixpath module to mix paths and URLs conveniently.
+ # We're using the posixpath module to mix paths and URLs
+ # conveniently.
source_name = name if os.sep == "/" else name.replace(os.sep, "/")
target_name = posixpath.join(posixpath.dirname(source_name), url_path)
- # Determine the hashed name of the target file with the storage backend.
+ # Determine the hashed name of the target file with the storage
+ # backend.
hashed_url = self._url(
self._stored_name,
unquote(target_name),
@@ -280,8 +283,8 @@ class HashedFilesMixin:
2. adjusting files which contain references to other files so they
refer to the cache-busting filenames.
- If either of these are performed on a file, then that file is considered
- post-processed.
+ If either of these are performed on a file, then that file is
+ considered post-processed.
"""
# don't even dare to process the files if we're in dry run mode
if dry_run:
@@ -448,7 +451,8 @@ class HashedFilesMixin:
# Move on to the next intermediate file.
intermediate_name = cache_name
# If the cache name can't be determined after the max number of passes,
- # the intermediate files on disk may be corrupt; avoid an infinite loop.
+ # the intermediate files on disk may be corrupt; avoid an infinite
+ # loop.
raise ValueError("The name '%s' could not be hashed with %r." % (name, self))
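
The post-processing described above only runs for hashing storages such as
ManifestStaticFilesStorage. A minimal settings sketch enabling it (Django 4.2+
STORAGES form, with the "default" entry spelled out so nothing else changes);
collectstatic then rewrites references to the hashed names:

    STORAGES = {
        "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
        "staticfiles": {
            "BACKEND": (
                "django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
            ),
        },
    }
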
diff --git a/django/core/cache/backends/base.py b/django/core/cache/backends/base.py
index aef9986389..f1b731aee1 100644
--- a/django/core/cache/backends/base.py
+++ b/django/core/cache/backends/base.py
@@ -283,8 +283,8 @@ class BaseCache:
def decr(self, key, delta=1, version=None):
"""
- Subtract delta from value in the cache. If the key does not exist, raise
- a ValueError exception.
+ Subtract delta from value in the cache. If the key does not exist,
+ raise a ValueError exception.
"""
return self.incr(key, -delta, version=version)
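
A short usage sketch of the behaviour the docstring describes, assuming a
configured default cache:

    from django.core.cache import cache

    cache.set("hits", 10)
    cache.decr("hits", 3)  # 7
    cache.decr("missing")  # raises ValueError, as documented above
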
diff --git a/django/core/cache/backends/db.py b/django/core/cache/backends/db.py
index b3f4eab7c1..03add8c4a5 100644
--- a/django/core/cache/backends/db.py
+++ b/django/core/cache/backends/db.py
@@ -136,9 +136,9 @@ class DatabaseCache(BaseDatabaseCache):
b64encoded = base64.b64encode(pickled).decode("latin1")
try:
# Note: typecasting for datetimes is needed by some 3rd party
- # database backends. All core backends work without typecasting,
- # so be careful about changes here - test suite will NOT pick
- # regressions.
+ # database backends. All core backends work without
+ # typecasting, so be careful about changes here - test suite
+            # will NOT pick up regressions.
with transaction.atomic(using=db):
cursor.execute(
"SELECT %s, %s FROM %s WHERE %s = %%s"
@@ -198,7 +198,8 @@ class DatabaseCache(BaseDatabaseCache):
else:
return False # touch failed.
except DatabaseError:
- # To be threadsafe, updates/inserts are allowed to fail silently
+ # To be threadsafe, updates/inserts are allowed to fail
+ # silently
return False
else:
return True
diff --git a/django/core/checks/messages.py b/django/core/checks/messages.py
index db7aa55119..5ba48abe5a 100644
--- a/django/core/checks/messages.py
+++ b/django/core/checks/messages.py
@@ -29,7 +29,8 @@ class CheckMessage:
obj = "?"
elif isinstance(self.obj, models.base.ModelBase):
# We need to hardcode ModelBase and Field cases because its __str__
- # method doesn't return "applabel.modellabel" and cannot be changed.
+ # method doesn't return "applabel.modellabel" and cannot be
+ # changed.
obj = self.obj._meta.label
else:
obj = str(self.obj)
diff --git a/django/core/checks/security/base.py b/django/core/checks/security/base.py
index 9506052196..7d9631df1e 100644
--- a/django/core/checks/security/base.py
+++ b/django/core/checks/security/base.py
@@ -266,7 +266,8 @@ def check_referrer_policy(app_configs, **kwargs):
if _security_middleware():
if settings.SECURE_REFERRER_POLICY is None:
return [W022]
- # Support a comma-separated string or iterable of values to allow fallback.
+ # Support a comma-separated string or iterable of values to allow
+ # fallback.
if isinstance(settings.SECURE_REFERRER_POLICY, str):
values = {v.strip() for v in settings.SECURE_REFERRER_POLICY.split(",")}
else:
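
Both accepted forms of the setting this check validates, as a settings sketch:

    # Either form passes check_referrer_policy().
    SECURE_REFERRER_POLICY = "strict-origin,origin"
    # or
    SECURE_REFERRER_POLICY = ["strict-origin", "origin"]
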
diff --git a/django/core/files/base.py b/django/core/files/base.py
index 9682467afa..b8613ffc55 100644
--- a/django/core/files/base.py
+++ b/django/core/files/base.py
@@ -67,8 +67,8 @@ class File(FileProxyMixin):
Return ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
- always return ``False`` -- there's no good reason to read from memory in
- chunks.
+ always return ``False`` -- there's no good reason to read from memory
+ in chunks.
"""
return self.size > (chunk_size or self.DEFAULT_CHUNK_SIZE)
diff --git a/django/core/files/move.py b/django/core/files/move.py
index 1605bebc1a..eff11b111b 100644
--- a/django/core/files/move.py
+++ b/django/core/files/move.py
@@ -19,8 +19,9 @@ def file_move_safe(
"""
Move a file from one location to another in the safest way possible.
- First, try ``os.rename``, which is simple but will break across filesystems.
- If that fails, stream manually from one file to another in pure Python.
+ First, try ``os.rename``, which is simple but will break across
+ filesystems. If that fails, stream manually from one file to another in
+ pure Python.
If the destination file exists and ``allow_overwrite`` is ``False``, raise
``FileExistsError``.
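
A minimal sketch of the rename-then-stream fallback the docstring describes;
this illustrates the strategy only and is not Django's file_move_safe itself:

    import os
    import shutil

    def move(src, dst, allow_overwrite=False):
        if not allow_overwrite and os.path.exists(dst):
            raise FileExistsError("destination exists: %s" % dst)
        try:
            os.rename(src, dst)  # cheap, but fails across filesystems
        except OSError:
            with open(src, "rb") as fsrc, open(dst, "wb") as fdst:
                shutil.copyfileobj(fsrc, fdst)
            os.remove(src)
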
diff --git a/django/core/files/storage/base.py b/django/core/files/storage/base.py
index 31ecbd209a..612c8cc357 100644
--- a/django/core/files/storage/base.py
+++ b/django/core/files/storage/base.py
@@ -149,8 +149,8 @@ class Storage:
def exists(self, name):
"""
- Return True if a file referenced by the given name already exists in the
- storage system, or False if the name is available for a new file.
+ Return True if a file referenced by the given name already exists in
+ the storage system, or False if the name is available for a new file.
"""
raise NotImplementedError(
"subclasses of Storage must provide an exists() method"
diff --git a/django/core/files/storage/filesystem.py b/django/core/files/storage/filesystem.py
index 428ae61b40..9592bff008 100644
--- a/django/core/files/storage/filesystem.py
+++ b/django/core/files/storage/filesystem.py
@@ -104,8 +104,9 @@ class FileSystemStorage(Storage, StorageSettingsMixin):
# This is a normal uploadedfile that we can stream.
else:
- # The combination of O_CREAT and O_EXCL makes os.open() raises an
- # OSError if the file already exists before it's opened.
+ # The combination of O_CREAT and O_EXCL makes os.open()
+                # raise an OSError if the file already exists before it's
+ # opened.
open_flags = (
os.O_WRONLY
| os.O_CREAT
diff --git a/django/core/files/uploadedfile.py b/django/core/files/uploadedfile.py
index efbfcac4c8..1d006ede4f 100644
--- a/django/core/files/uploadedfile.py
+++ b/django/core/files/uploadedfile.py
@@ -54,7 +54,8 @@ class UploadedFile(File):
# Just use the basename of the file -- anything else is dangerous.
name = os.path.basename(name)
- # File names longer than 255 characters can cause problems on older OSes.
+ # File names longer than 255 characters can cause problems on older
+ # OSes.
if len(name) > 255:
name, ext = os.path.splitext(name)
ext = ext[:255]
@@ -126,7 +127,8 @@ class InMemoryUploadedFile(UploadedFile):
class SimpleUploadedFile(InMemoryUploadedFile):
"""
- A simple representation of a file, which just has content, size, and a name.
+ A simple representation of a file, which just has content, size, and a
+ name.
"""
def __init__(self, name, content, content_type="text/plain"):
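
A typical test usage of the class whose docstring is rewrapped above:

    from django.core.files.uploadedfile import SimpleUploadedFile

    upload = SimpleUploadedFile("report.txt", b"hello", content_type="text/plain")
    assert upload.name == "report.txt" and upload.size == 5
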
diff --git a/django/core/files/uploadhandler.py b/django/core/files/uploadhandler.py
index ab86f7fede..133c0a597f 100644
--- a/django/core/files/uploadhandler.py
+++ b/django/core/files/uploadhandler.py
@@ -37,8 +37,8 @@ class StopUpload(UploadFileException):
def __init__(self, connection_reset=False):
"""
If ``connection_reset`` is ``True``, Django will halt the upload
- without consuming the rest of the upload. This will cause the browser to
- show a "connection reset" error.
+ without consuming the rest of the upload. This will cause the browser
+ to show a "connection reset" error.
"""
self.connection_reset = connection_reset
@@ -51,7 +51,8 @@ class StopUpload(UploadFileException):
class SkipFile(UploadFileException):
"""
- This exception is raised by an upload handler that wants to skip a given file.
+ This exception is raised by an upload handler that wants to skip a given
+ file.
"""
pass
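
A minimal sketch of an upload handler using the exceptions documented above;
the handler name and the 1 MB threshold are illustrative only:

    from django.core.files.uploadhandler import FileUploadHandler, SkipFile

    class SkipLargeFilesHandler(FileUploadHandler):
        def receive_data_chunk(self, raw_data, start):
            if start + len(raw_data) > 1024 * 1024:
                raise SkipFile("file too large")
            return raw_data  # pass the chunk on to the next handler

        def file_complete(self, file_size):
            return None  # let a later handler build the UploadedFile
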
diff --git a/django/core/handlers/asgi.py b/django/core/handlers/asgi.py
index 10d18b60eb..beace7597c 100644
--- a/django/core/handlers/asgi.py
+++ b/django/core/handlers/asgi.py
@@ -331,8 +331,8 @@ class ASGIHandler(base.BaseHandler):
)
# Streaming responses need to be pinned to their iterator.
if response.streaming:
- # - Consume via `__aiter__` and not `streaming_content` directly, to
- # allow mapping of a sync iterator.
+ # - Consume via `__aiter__` and not `streaming_content` directly,
+ # to allow mapping of a sync iterator.
# - Use aclosing() when consuming aiter. See
# https://github.com/python/cpython/commit/6e8dcdaaa49d4313bf9fab9f9923ca5828fbb10e
async with aclosing(aiter(response)) as content:
@@ -342,8 +342,9 @@ class ASGIHandler(base.BaseHandler):
{
"type": "http.response.body",
"body": chunk,
- # Ignore "more" as there may be more parts; instead,
- # use an empty final closing message with False.
+ # Ignore "more" as there may be more parts;
+ # instead, use an empty final closing message
+ # with False.
"more_body": True,
}
)
diff --git a/django/core/handlers/base.py b/django/core/handlers/base.py
index 8911543d4e..af3e0c3a50 100644
--- a/django/core/handlers/base.py
+++ b/django/core/handlers/base.py
@@ -27,7 +27,8 @@ class BaseHandler:
"""
Populate middleware lists from settings.MIDDLEWARE.
- Must be called after the environment is fixed (see __call__ in subclasses).
+ Must be called after the environment is fixed (see __call__ in
+ subclasses).
"""
self._view_middleware = []
self._template_response_middleware = []
diff --git a/django/core/mail/message.py b/django/core/mail/message.py
index 93269d0310..3fd2ef6656 100644
--- a/django/core/mail/message.py
+++ b/django/core/mail/message.py
@@ -153,7 +153,8 @@ class MIMEMixin:
class SafeMIMEMessage(MIMEMixin, MIMEMessage):
def __setitem__(self, name, val):
- # Per RFC 2046 Section 5.2.1, message/rfc822 attachment headers must be ASCII.
+ # Per RFC 2046 Section 5.2.1, message/rfc822 attachment headers must be
+ # ASCII.
name, val = forbid_multi_line_headers(name, val, "ascii")
MIMEMessage.__setitem__(self, name, val)
diff --git a/django/core/management/__init__.py b/django/core/management/__init__.py
index 0c16447d58..f547ef730c 100644
--- a/django/core/management/__init__.py
+++ b/django/core/management/__init__.py
@@ -117,7 +117,8 @@ def call_command(command_name, *args, **options):
else:
command = load_command_class(app_name, command_name)
- # Simulate argument parsing to get the option defaults (see #10080 for details).
+ # Simulate argument parsing to get the option defaults (see #10080 for
+ # details).
parser = command.create_parser("", command_name)
# Use the `dest` option name from the parser option
opt_mapping = {
@@ -256,9 +257,9 @@ class ManagementUtility:
except KeyError:
if os.environ.get("DJANGO_SETTINGS_MODULE"):
# If `subcommand` is missing due to misconfigured settings, the
- # following line will retrigger an ImproperlyConfigured exception
- # (get_commands() swallows the original one) so the user is
- # informed about it.
+ # following line will retrigger an ImproperlyConfigured
+ # exception (get_commands() swallows the original one) so the
+ # user is informed about it.
settings.INSTALLED_APPS
elif not settings.configured:
sys.stderr.write("No Django settings specified.\n")
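
The option-defaults simulation above is what lets call_command() accept both
spellings of an option, for example:

    from django.core.management import call_command

    call_command("migrate", verbosity=0)        # keyword option
    call_command("migrate", "--fake-initial")   # argv-style flag
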
diff --git a/django/core/management/commands/diffsettings.py b/django/core/management/commands/diffsettings.py
index 047e4764a8..5cc2e6d674 100644
--- a/django/core/management/commands/diffsettings.py
+++ b/django/core/management/commands/diffsettings.py
@@ -45,7 +45,8 @@ class Command(BaseCommand):
def handle(self, **options):
from django.conf import Settings, global_settings, settings
- # Because settings are imported lazily, we need to explicitly load them.
+ # Because settings are imported lazily, we need to explicitly load
+ # them.
if not settings.configured:
settings._setup()
diff --git a/django/core/management/commands/dumpdata.py b/django/core/management/commands/dumpdata.py
index 5a9ab83919..15e615c1d0 100644
--- a/django/core/management/commands/dumpdata.py
+++ b/django/core/management/commands/dumpdata.py
@@ -229,7 +229,8 @@ class Command(BaseCommand):
self.stdout.ending = None
progress_output = None
object_count = 0
- # If dumpdata is outputting to stdout, there is no way to display progress
+ # If dumpdata is outputting to stdout, there is no way to display
+ # progress
if output and self.stdout.isatty() and options["verbosity"] > 0:
progress_output = self.stdout
object_count = sum(get_objects(count_only=True))
diff --git a/django/core/management/commands/flush.py b/django/core/management/commands/flush.py
index a057393d53..ff7ee45557 100644
--- a/django/core/management/commands/flush.py
+++ b/django/core/management/commands/flush.py
@@ -86,8 +86,9 @@ Are you sure you want to do this?
# Empty sql_list may signify an empty database and post_migrate
# would then crash.
if sql_list and not inhibit_post_migrate:
- # Emit the post migrate signal. This allows individual applications to
- # respond as if the database had been migrated from scratch.
+ # Emit the post migrate signal. This allows individual
+ # applications to respond as if the database had been migrated
+ # from scratch.
emit_post_migrate_signal(verbosity, interactive, database)
else:
self.stdout.write("Flush cancelled.")
diff --git a/django/core/management/commands/inspectdb.py b/django/core/management/commands/inspectdb.py
index 81f0cbefea..8c271498c6 100644
--- a/django/core/management/commands/inspectdb.py
+++ b/django/core/management/commands/inspectdb.py
@@ -193,8 +193,8 @@ class Command(BaseCommand):
)
used_relations.add(rel_to)
else:
- # Calling `get_field_type` to get the field type string and any
- # additional parameters and notes.
+ # Calling `get_field_type` to get the field type string
+ # and any additional parameters and notes.
field_type, field_params, field_notes = self.get_field_type(
connection, table_name, row
)
@@ -203,8 +203,8 @@ class Command(BaseCommand):
field_type += "("
- # Don't output 'id = meta.AutoField(primary_key=True)', because
- # that's assumed if it doesn't exist.
+ # Don't output 'id = meta.AutoField(primary_key=True)',
+ # because that's assumed if it doesn't exist.
if att_name == "id" and extra_params == {"primary_key": True}:
if field_type == "AutoField(":
continue
@@ -215,8 +215,8 @@ class Command(BaseCommand):
):
comment_notes.append("AutoField?")
- # Add 'null' and 'blank', if the 'null_ok' flag was present in the
- # table description.
+ # Add 'null' and 'blank', if the 'null_ok' flag was present
+ # in the table description.
if row.null_ok: # If it's NULL...
extra_params["blank"] = True
extra_params["null"] = True
@@ -287,7 +287,8 @@ class Command(BaseCommand):
while new_name.find(LOOKUP_SEP) >= 0:
new_name = new_name.replace(LOOKUP_SEP, "_")
if col_name.lower().find(LOOKUP_SEP) >= 0:
- # Only add the comment if the double underscore was in the original name
+ # Only add the comment if the double underscore was in the
+ # original name
field_notes.append(
"Field renamed because it contained more than one '_' in a row."
)
diff --git a/django/core/management/commands/makemessages.py b/django/core/management/commands/makemessages.py
index ac67a5bc9f..60fe295ac1 100644
--- a/django/core/management/commands/makemessages.py
+++ b/django/core/management/commands/makemessages.py
@@ -420,9 +420,11 @@ class Command(BaseCommand):
for locale in locales:
if not is_valid_locale(locale):
# Try to guess what valid locale it could be
- # Valid examples are: en_GB, shi_Latn_MA and nl_NL-x-informal
+ # Valid examples are: en_GB, shi_Latn_MA and
+ # nl_NL-x-informal
- # Search for characters followed by a non character (i.e. separator)
+                # Search for characters followed by a non-character (i.e.
+ # separator)
match = re.match(
r"^(?P[a-zA-Z]+)"
r"(?P[^a-zA-Z])"
@@ -464,8 +466,9 @@ class Command(BaseCommand):
@cached_property
def gettext_version(self):
- # Gettext tools will output system-encoded bytestrings instead of UTF-8,
- # when looking up the version. It's especially a problem on Windows.
+ # Gettext tools will output system-encoded bytestrings instead of
+ # UTF-8, when looking up the version. It's especially a problem on
+ # Windows.
out, err, status = popen_wrapper(
["xgettext", "--version"],
stdout_encoding=DEFAULT_LOCALE_ENCODING,
diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py
index 690d0e5053..7f711ed7ae 100644
--- a/django/core/management/commands/makemigrations.py
+++ b/django/core/management/commands/makemigrations.py
@@ -139,7 +139,8 @@ class Command(BaseCommand):
# the loader doesn't try to resolve replaced migrations from DB.
loader = MigrationLoader(None, ignore_no_migrations=True)
- # Raise an error if any migrations are applied before their dependencies.
+ # Raise an error if any migrations are applied before their
+ # dependencies.
consistency_check_labels = {config.label for config in apps.get_app_configs()}
# Non-default databases are only checked if database routers used.
aliases_to_check = (
@@ -186,7 +187,8 @@ class Command(BaseCommand):
"'python manage.py makemigrations --merge'" % name_str
)
- # If they want to merge and there's nothing to merge, then politely exit
+ # If they want to merge and there's nothing to merge, then politely
+ # exit
if self.merge and not conflicts:
self.log("No conflicts detected to merge.")
return
@@ -505,9 +507,9 @@ class Command(BaseCommand):
if self.scriptable:
self.stdout.write(writer.path)
elif self.verbosity == 3:
- # Alternatively, makemigrations --merge --dry-run --verbosity 3
- # will log the merge migrations rather than saving the file
- # to the disk.
+ # Alternatively, makemigrations --merge --dry-run
+ # --verbosity 3 will log the merge migrations rather than
+ # saving the file to the disk.
self.log(
self.style.MIGRATE_HEADING(
"Full merge migrations file '%s':" % writer.filename
diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py
index 4ef6e1a87c..268f669ba2 100644
--- a/django/core/management/commands/migrate.py
+++ b/django/core/management/commands/migrate.py
@@ -113,7 +113,8 @@ class Command(BaseCommand):
# Work out which apps have migrations and which do not
executor = MigrationExecutor(connection, self.migration_progress_callback)
- # Raise an error if any migrations are applied before their dependencies.
+ # Raise an error if any migrations are applied before their
+ # dependencies.
executor.loader.check_consistent_history(connection)
# Before anything else, see if there's conflicting apps and drop out
@@ -357,8 +358,8 @@ class Command(BaseCommand):
fake=fake,
fake_initial=fake_initial,
)
- # post_migrate signals have access to all models. Ensure that all models
- # are reloaded in case any are delayed.
+ # post_migrate signals have access to all models. Ensure that all
+ # models are reloaded in case any are delayed.
post_migrate_state.clear_delayed_apps_cache()
post_migrate_apps = post_migrate_state.apps
@@ -375,8 +376,8 @@ class Command(BaseCommand):
[ModelState.from_model(apps.get_model(*model)) for model in model_keys]
)
- # Send the post_migrate signal, so individual apps can do whatever they need
- # to do at this point.
+ # Send the post_migrate signal, so individual apps can do whatever they
+ # need to do at this point.
emit_post_migrate_signal(
self.verbosity,
self.interactive,
diff --git a/django/core/management/commands/shell.py b/django/core/management/commands/shell.py
index b05e7ff404..47342291e8 100644
--- a/django/core/management/commands/shell.py
+++ b/django/core/management/commands/shell.py
@@ -70,8 +70,8 @@ class Command(BaseCommand):
# Set up a dictionary to serve as the environment for the shell.
imported_objects = self.get_namespace(**options)
- # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
- # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
+ # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow
+ # system conventions and get $PYTHONSTARTUP first then .pythonrc.py.
if not options["no_startup"]:
for pythonrc in OrderedSet(
[os.environ.get("PYTHONSTARTUP"), os.path.expanduser("~/.pythonrc.py")]
@@ -89,9 +89,9 @@ class Command(BaseCommand):
except Exception:
traceback.print_exc()
- # By default, this will set up readline to do tab completion and to read and
- # write history to the .python_history file, but this can be overridden by
- # $PYTHONSTARTUP or ~/.pythonrc.py.
+ # By default, this will set up readline to do tab completion and to
+ # read and write history to the .python_history file, but this can be
+ # overridden by $PYTHONSTARTUP or ~/.pythonrc.py.
try:
hook = sys.__interactivehook__
except AttributeError:
diff --git a/django/core/management/commands/sqlmigrate.py b/django/core/management/commands/sqlmigrate.py
index 076499b3e2..3c2e25eeea 100644
--- a/django/core/management/commands/sqlmigrate.py
+++ b/django/core/management/commands/sqlmigrate.py
@@ -74,8 +74,8 @@ class Command(BaseCommand):
migration.atomic and connection.features.can_rollback_ddl
)
- # Make a plan that represents just the requested migrations and show SQL
- # for it
+ # Make a plan that represents just the requested migrations and show
+ # SQL for it
plan = [(loader.graph.nodes[target], options["backwards"])]
sql_statements = loader.collect_sql(plan)
if not sql_statements and options["verbosity"] >= 1:
diff --git a/django/core/paginator.py b/django/core/paginator.py
index 4376d6db85..aabc2f8532 100644
--- a/django/core/paginator.py
+++ b/django/core/paginator.py
@@ -56,7 +56,8 @@ class BasePaginator:
else self.default_error_messages | error_messages
)
if self.per_page <= self.orphans:
- # RemovedInDjango70Warning: When the deprecation ends, replace with:
+ # RemovedInDjango70Warning: When the deprecation ends, replace
+ # with:
# raise ValueError(
# "The orphans argument cannot be larger than or equal to the "
# "per_page argument."
diff --git a/django/core/serializers/__init__.py b/django/core/serializers/__init__.py
index 7e23769533..2f730d5cca 100644
--- a/django/core/serializers/__init__.py
+++ b/django/core/serializers/__init__.py
@@ -225,8 +225,9 @@ def sort_dependencies(app_list, allow_cycles=False):
model, deps = model_dependencies.pop()
# If all of the models in the dependency list are either already
- # on the final model list, or not on the original serialization list,
- # then we've found another model with all it's dependencies satisfied.
+ # on the final model list, or not on the original serialization
+        # list, then we've found another model with all its dependencies
+ # satisfied.
if all(d not in models or d in model_list for d in deps):
model_list.append(model)
changed = True
diff --git a/django/core/serializers/base.py b/django/core/serializers/base.py
index 1fbca9244b..efc55981eb 100644
--- a/django/core/serializers/base.py
+++ b/django/core/serializers/base.py
@@ -108,8 +108,9 @@ class Serializer:
self.first = True
for count, obj in enumerate(queryset, start=1):
self.start_object(obj)
- # Use the concrete parent class' _meta instead of the object's _meta
- # This is to avoid local_fields problems for proxy models. Refs #17717.
+ # Use the concrete parent class' _meta instead of the object's
+            # _meta. This is to avoid local_fields problems for proxy models.
+ # Refs #17717.
concrete_model = obj._meta.concrete_model
# When using natural primary keys, retrieve the pk field of the
# parent for multi-table inheritance child models. That field must
diff --git a/django/core/serializers/python.py b/django/core/serializers/python.py
index 807d4b3977..2929874b01 100644
--- a/django/core/serializers/python.py
+++ b/django/core/serializers/python.py
@@ -1,7 +1,7 @@
"""
A Python "serializer". Doesn't do much serializing per se -- just converts to
-and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
-other serializers.
+and from basic Python data types (lists, dicts, strings, etc.). Useful as a
+basis for other serializers.
"""
from django.apps import apps
diff --git a/django/core/serializers/pyyaml.py b/django/core/serializers/pyyaml.py
index c72d1fa03b..fdc245756f 100644
--- a/django/core/serializers/pyyaml.py
+++ b/django/core/serializers/pyyaml.py
@@ -46,11 +46,12 @@ class Serializer(PythonSerializer):
def _value_from_field(self, obj, field):
# A nasty special case: base YAML doesn't support serialization of time
- # types (as opposed to dates or datetimes, which it does support). Since
- # we want to use the "safe" serializer for better interoperability, we
- # need to do something with those pesky times. Converting 'em to strings
- # isn't perfect, but it's better than a "!!python/time" type which would
- # halt deserialization under any other language.
+ # types (as opposed to dates or datetimes, which it does support).
+ # Since we want to use the "safe" serializer for better
+ # interoperability, we need to do something with those pesky times.
+ # Converting 'em to strings isn't perfect, but it's better than a
+ # "!!python/time" type which would halt deserialization under any other
+ # language.
value = super()._value_from_field(obj, field)
if isinstance(value, datetime.time):
value = str(value)
diff --git a/django/core/serializers/xml_serializer.py b/django/core/serializers/xml_serializer.py
index 360d5309d8..0557af3954 100644
--- a/django/core/serializers/xml_serializer.py
+++ b/django/core/serializers/xml_serializer.py
@@ -250,7 +250,8 @@ class Deserializer(base.Deserializer):
continue
field = Model._meta.get_field(field_name)
- # As is usually the case, relation fields get the special treatment.
+ # As is usually the case, relation fields get the special
+ # treatment.
if field.remote_field and isinstance(
field.remote_field, models.ManyToManyRel
):
@@ -303,7 +304,8 @@ class Deserializer(base.Deserializer):
if hasattr(model._default_manager, "get_by_natural_key"):
keys = node.getElementsByTagName("natural")
if keys:
- # If there are 'natural' subelements, it must be a natural key
+ # If there are 'natural' subelements, it must be a natural
+ # key
field_value = [getInnerText(k).strip() for k in keys]
try:
obj = model._default_manager.db_manager(
@@ -343,7 +345,8 @@ class Deserializer(base.Deserializer):
def m2m_convert(n):
keys = n.getElementsByTagName("natural")
if keys:
- # If there are 'natural' subelements, it must be a natural key
+ # If there are 'natural' subelements, it must be a natural
+ # key
field_value = [getInnerText(k).strip() for k in keys]
obj_pk = (
default_manager.db_manager(self.db)
@@ -394,7 +397,8 @@ class Deserializer(base.Deserializer):
def getInnerText(node):
"""Get all the inner text of a DOM node (recursively)."""
- # inspired by https://mail.python.org/pipermail/xml-sig/2005-March/011022.html
+ # inspired by
+ # https://mail.python.org/pipermail/xml-sig/2005-March/011022.html
inner_text = []
for child in node.childNodes:
if (
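
The "natural" subelements mentioned above rely on the standard natural-key
hooks; a minimal model/manager sketch with placeholder names:

    from django.db import models

    class AuthorManager(models.Manager):
        def get_by_natural_key(self, slug):
            return self.get(slug=slug)

    class Author(models.Model):
        slug = models.SlugField(unique=True)
        objects = AuthorManager()

        def natural_key(self):
            return (self.slug,)
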
diff --git a/django/core/servers/basehttp.py b/django/core/servers/basehttp.py
index 495657d264..41719034fb 100644
--- a/django/core/servers/basehttp.py
+++ b/django/core/servers/basehttp.py
@@ -234,7 +234,9 @@ class WSGIRequestHandler(simple_server.WSGIRequestHandler):
pass
def handle_one_request(self):
- """Copy of WSGIRequestHandler.handle() but with different ServerHandler"""
+ """
+ Copy of WSGIRequestHandler.handle() but with different ServerHandler
+ """
self.raw_requestline = self.rfile.readline(65537)
if len(self.raw_requestline) > 65536:
self.requestline = ""
diff --git a/django/core/signing.py b/django/core/signing.py
index e3d7785910..ed56ce0908 100644
--- a/django/core/signing.py
+++ b/django/core/signing.py
@@ -17,7 +17,8 @@ If the signature fails, a BadSignature exception is raised.
'hello'
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified")
...
-BadSignature: Signature "ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified" does not match
+BadSignature: Signature "ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified" does
+not match
You can optionally compress the JSON prior to base64 encoding it to save
space, using the compress=True argument. This checks if compression actually
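
A short usage sketch of the API this docstring demonstrates, assuming
SECRET_KEY is configured:

    from django.core import signing

    token = signing.dumps({"user": 42}, compress=True)
    try:
        data = signing.loads(token)
    except signing.BadSignature:
        data = None
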
diff --git a/django/db/backends/base/base.py b/django/db/backends/base/base.py
index 54328c8450..3f3d29874a 100644
--- a/django/db/backends/base/base.py
+++ b/django/db/backends/base/base.py
@@ -595,8 +595,8 @@ class BaseDatabaseWrapper:
"""
if self.connection is not None:
self.health_check_done = False
- # If the application didn't restore the original autocommit setting,
- # don't take chances, drop the connection.
+ # If the application didn't restore the original autocommit
+ # setting, don't take chances, drop the connection.
if self.get_autocommit() != self.settings_dict["AUTOCOMMIT"]:
self.close()
return
diff --git a/django/db/backends/base/creation.py b/django/db/backends/base/creation.py
index 1ed583f9e4..7c11465f94 100644
--- a/django/db/backends/base/creation.py
+++ b/django/db/backends/base/creation.py
@@ -94,9 +94,10 @@ class BaseDatabaseCreation:
settings.MIGRATION_MODULES = old_migration_modules
# We then serialize the current state of the database into a string
- # and store it on the connection. This slightly horrific process is so people
- # who are testing on databases without transactions or who are using
- # a TransactionTestCase still get a clean database on every test run.
+ # and store it on the connection. This slightly horrific process is so
+ # people who are testing on databases without transactions or who are
+ # using a TransactionTestCase still get a clean database on every test
+ # run.
if serialize is not None:
warnings.warn(
"DatabaseCreation.create_test_db(serialize) is deprecated. Call "
@@ -112,7 +113,8 @@ class BaseDatabaseCreation:
call_command("createcachetable", database=self.connection.alias)
- # Ensure a connection for the side effect of initializing the test database.
+ # Ensure a connection for the side effect of initializing the test
+ # database.
self.connection.ensure_connection()
if os.environ.get("RUNNING_DJANGOS_TEST_SUITE") == "true":
@@ -220,8 +222,8 @@ class BaseDatabaseCreation:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
- # if we want to keep the db, then no need to do any of the below,
- # just return and skip it all.
+ # if we want to keep the db, then no need to do any of the
+ # below, just return and skip it all.
if keepdb:
return test_database_name
@@ -365,7 +367,8 @@ class BaseDatabaseCreation:
for test_name in tests:
test_case_name, _, test_method_name = test_name.rpartition(".")
test_app = test_name.split(".")[0]
- # Importing a test app that isn't installed raises RuntimeError.
+ # Importing a test app that isn't installed raises
+ # RuntimeError.
if test_app in settings.INSTALLED_APPS:
test_case = import_string(test_case_name)
test_method = getattr(test_case, test_method_name)
diff --git a/django/db/backends/base/features.py b/django/db/backends/base/features.py
index cf712739c7..ad44a31d90 100644
--- a/django/db/backends/base/features.py
+++ b/django/db/backends/base/features.py
@@ -201,7 +201,8 @@ class BaseDatabaseFeatures:
# supported by the Python driver
supports_paramstyle_pyformat = True
- # Does the backend require literal defaults, rather than parameterized ones?
+ # Does the backend require literal defaults, rather than parameterized
+ # ones?
requires_literal_defaults = False
# Does the backend support functions in defaults?
@@ -213,7 +214,8 @@ class BaseDatabaseFeatures:
# Does the backend support the DEFAULT keyword in bulk insert queries?
supports_default_keyword_in_bulk_insert = True
- # Does the backend require a connection reset after each material schema change?
+ # Does the backend require a connection reset after each material schema
+ # change?
connection_persists_old_columns = False
# What kind of error does the backend throw when accessing closed cursor?
@@ -228,11 +230,12 @@ class BaseDatabaseFeatures:
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = False
- # Does the backend support "select for update" queries with limit (and offset)?
+ # Does the backend support "select for update" queries with limit (and
+ # offset)?
supports_select_for_update_with_limit = True
- # Does the backend ignore null expressions in GREATEST and LEAST queries unless
- # every expression is null?
+ # Does the backend ignore null expressions in GREATEST and LEAST queries
+ # unless every expression is null?
greatest_least_ignores_nulls = False
# Can the backend clone databases for parallel test execution?
@@ -261,10 +264,10 @@ class BaseDatabaseFeatures:
# Does the database support ORDER BY in aggregate expressions?
supports_aggregate_order_by_clause = False
- # Does the database backend support DISTINCT when using multiple arguments in an
- # aggregate expression? For example, Sqlite treats the "delimiter" argument of
- # STRING_AGG/GROUP_CONCAT as an extra argument and does not allow using a custom
- # delimiter along with DISTINCT.
+ # Does the database backend support DISTINCT when using multiple arguments
+    # in an aggregate expression? For example, SQLite treats the "delimiter"
+ # argument of STRING_AGG/GROUP_CONCAT as an extra argument and does not
+ # allow using a custom delimiter along with DISTINCT.
supports_aggregate_distinct_multiple_argument = True
# Does the database support SQL 2023 ANY_VALUE in GROUP BY?
diff --git a/django/db/backends/base/introspection.py b/django/db/backends/base/introspection.py
index 12360538b9..3a62ab6327 100644
--- a/django/db/backends/base/introspection.py
+++ b/django/db/backends/base/introspection.py
@@ -158,8 +158,9 @@ class BaseDatabaseIntrospection:
def get_sequences(self, cursor, table_name, table_fields=()):
"""
Return a list of introspected sequences for table_name. Each sequence
-        is a dict: {'table': <table_name>, 'column': <column_name>}. An optional
-        'name' key can be added if the backend supports named sequences.
+        is a dict: {'table': <table_name>, 'column': <column_name>}. An
+        optional 'name' key can be added if the backend supports named
+        sequences.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseIntrospection may require a get_sequences() "
@@ -168,8 +169,8 @@ class BaseDatabaseIntrospection:
def get_relations(self, cursor, table_name):
"""
- Return a dictionary of {field_name: (field_name_other_table, other_table)}
- representing all foreign keys in the given table.
+ Return a dictionary of {field_name: (field_name_other_table,
+ other_table)} representing all foreign keys in the given table.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseIntrospection may require a "
diff --git a/django/db/backends/base/operations.py b/django/db/backends/base/operations.py
index c426a2b90a..a95f535bdb 100644
--- a/django/db/backends/base/operations.py
+++ b/django/db/backends/base/operations.py
@@ -546,8 +546,8 @@ class BaseDatabaseOperations:
def adapt_datetimefield_value(self, value):
"""
- Transform a datetime value to an object compatible with what is expected
- by the backend driver for datetime columns.
+ Transform a datetime value to an object compatible with what is
+ expected by the backend driver for datetime columns.
"""
if value is None:
return None
diff --git a/django/db/backends/base/schema.py b/django/db/backends/base/schema.py
index 3d7ea83dd7..5262864e7f 100644
--- a/django/db/backends/base/schema.py
+++ b/django/db/backends/base/schema.py
@@ -494,8 +494,8 @@ class BaseDatabaseSchemaEditor:
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
- for particularly tricky backends (defaults are not user-defined, though,
- so this is safe).
+ for particularly tricky backends (defaults are not user-defined,
+ though, so this is safe).
"""
raise NotImplementedError()
@@ -1234,7 +1234,8 @@ class BaseDatabaseSchemaEditor:
self.execute(self._create_primary_key_sql(model, new_field))
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
- # Handle our type alters on the other end of rels from the PK stuff above
+ # Handle our type alters on the other end of rels from the PK stuff
+ # above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params["type"]
@@ -1483,7 +1484,8 @@ class BaseDatabaseSchemaEditor:
)
self.alter_field(
new_field.remote_field.through,
- # for self-referential models we need to alter field from the other end too
+ # for self-referential models we need to alter field from the other
+ # end too
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
diff --git a/django/db/backends/dummy/base.py b/django/db/backends/dummy/base.py
index 0e2edc73cf..c42fc5ef76 100644
--- a/django/db/backends/dummy/base.py
+++ b/django/db/backends/dummy/base.py
@@ -1,7 +1,8 @@
"""
Dummy database backend for Django.
-Django uses this if the database ENGINE setting is empty (None or empty string).
+Django uses this if the database ENGINE setting is empty (None or empty
+string).
Each of these API functions, except connection.close(), raise
ImproperlyConfigured.
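
A settings sketch that routes to this dummy backend, i.e. an empty ENGINE:

    DATABASES = {"default": {"ENGINE": ""}}  # queries raise ImproperlyConfigured
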
diff --git a/django/db/backends/mysql/base.py b/django/db/backends/mysql/base.py
index e594067b50..2ad8627dfe 100644
--- a/django/db/backends/mysql/base.py
+++ b/django/db/backends/mysql/base.py
@@ -22,7 +22,8 @@ except ImportError as err:
from MySQLdb.constants import CLIENT, FIELD_TYPE
from MySQLdb.converters import conversions
-# Some of these import MySQLdb, so import them after checking if it's installed.
+# Some of these import MySQLdb, so import them after checking if it's
+# installed.
from .client import DatabaseClient
from .creation import DatabaseCreation
from .features import DatabaseFeatures
@@ -57,7 +58,8 @@ class CursorWrapper:
exception instances and reraises them with the correct types.
Implemented as a wrapper, rather than a subclass, so that it isn't stuck
- to the particular underlying representation returned by Connection.cursor().
+ to the particular underlying representation returned by
+ Connection.cursor().
"""
codes_for_integrityerror = (
@@ -101,9 +103,10 @@ class CursorWrapper:
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "mysql"
# This dictionary maps Field objects to their associated MySQL column
- # types, as strings. Column-type strings can contain format strings; they'll
- # be interpolated against the values of Field.__dict__ before being output.
- # If a column type is set to None, it won't be included in the output.
+ # types, as strings. Column-type strings can contain format strings;
+ # they'll be interpolated against the values of Field.__dict__ before being
+ # output. If a column type is set to None, it won't be included in the
+ # output.
_data_types = {
"AutoField": "integer AUTO_INCREMENT",
@@ -175,13 +178,13 @@ class DatabaseWrapper(BaseDatabaseWrapper):
}
# The patterns below are used to generate SQL pattern lookup clauses when
- # the right-hand side of the lookup isn't a raw string (it might be an expression
- # or the result of a bilateral transformation).
- # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
- # escaped on database side.
+ # the right-hand side of the lookup isn't a raw string (it might be an
+ # expression or the result of a bilateral transformation). In those cases,
+ # special characters for LIKE operators (e.g. \, *, _) should be escaped on
+ # database side.
#
- # Note: we use str.format() here for readability as '%' is used as a wildcard for
- # the LIKE operator.
+ # Note: we use str.format() here for readability as '%' is used as a
+ # wildcard for the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
"contains": "LIKE BINARY CONCAT('%%', {}, '%%')",
diff --git a/django/db/backends/mysql/features.py b/django/db/backends/mysql/features.py
index 6ae4c56af1..24ecc0d80b 100644
--- a/django/db/backends/mysql/features.py
+++ b/django/db/backends/mysql/features.py
@@ -137,7 +137,9 @@ class DatabaseFeatures(BaseDatabaseFeatures):
@cached_property
def _mysql_storage_engine(self):
- "Internal method used in Django tests. Don't rely on this from your code"
+ """
+ Internal method used in Django tests. Don't rely on this from your code
+ """
return self.connection.mysql_server_data["default_storage_engine"]
@cached_property
diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py
index f00d57cce4..24f773f009 100644
--- a/django/db/backends/mysql/introspection.py
+++ b/django/db/backends/mysql/introspection.py
@@ -131,9 +131,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
)
row = cursor.fetchone()
default_column_collation = row[0] if row else ""
- # information_schema database gives more accurate results for some figures:
- # - varchar length returned by cursor.description is an internal length,
- # not visible length (#5725)
+ # information_schema database gives more accurate results for some
+ # figures:
+ # - varchar length returned by cursor.description is an internal
+ # length, not visible length (#5725)
# - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description
cursor.execute(
@@ -195,8 +196,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
def get_relations(self, cursor, table_name):
"""
- Return a dictionary of {field_name: (field_name_other_table, other_table)}
- representing all foreign keys in the given table.
+ Return a dictionary of {field_name: (field_name_other_table,
+ other_table)} representing all foreign keys in the given table.
"""
cursor.execute(
"""
diff --git a/django/db/backends/mysql/operations.py b/django/db/backends/mysql/operations.py
index 9806303539..2d6185a2ca 100644
--- a/django/db/backends/mysql/operations.py
+++ b/django/db/backends/mysql/operations.py
@@ -359,7 +359,8 @@ class DatabaseOperations(BaseDatabaseOperations):
return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), params
def explain_query_prefix(self, format=None, **options):
- # Alias MySQL's TRADITIONAL to TEXT for consistency with other backends.
+ # Alias MySQL's TRADITIONAL to TEXT for consistency with other
+ # backends.
if format and format.upper() == "TEXT":
format = "TRADITIONAL"
elif (
diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py
index d00c28c3e9..c2ad881ecc 100644
--- a/django/db/backends/oracle/base.py
+++ b/django/db/backends/oracle/base.py
@@ -49,8 +49,8 @@ _setup_environment(
[
# Oracle takes client-side character set encoding from the environment.
("NLS_LANG", ".AL32UTF8"),
- # This prevents Unicode from getting mangled by getting encoded into the
- # potentially non-Unicode database character set.
+ # This prevents Unicode from getting mangled by getting encoded into
+ # the potentially non-Unicode database character set.
("ORA_NCHAR_LITERAL_REPLACE", "TRUE"),
]
)
@@ -110,12 +110,13 @@ class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "oracle"
display_name = "Oracle"
# This dictionary maps Field objects to their associated Oracle column
- # types, as strings. Column-type strings can contain format strings; they'll
- # be interpolated against the values of Field.__dict__ before being output.
- # If a column type is set to None, it won't be included in the output.
+ # types, as strings. Column-type strings can contain format strings;
+ # they'll be interpolated against the values of Field.__dict__ before being
+ # output. If a column type is set to None, it won't be included in the
+ # output.
#
- # Any format strings starting with "qn_" are quoted before being used in the
- # output (the "qn_" prefix is stripped before the lookup is performed.
+ # Any format strings starting with "qn_" are quoted before being used in
+ # the output (the "qn_" prefix is stripped before the lookup is performed.
data_types = {
"AutoField": "NUMBER(11) GENERATED BY DEFAULT ON NULL AS IDENTITY",
"BigAutoField": "NUMBER(19) GENERATED BY DEFAULT ON NULL AS IDENTITY",
@@ -200,13 +201,13 @@ class DatabaseWrapper(BaseDatabaseWrapper):
}
# The patterns below are used to generate SQL pattern lookup clauses when
- # the right-hand side of the lookup isn't a raw string (it might be an expression
- # or the result of a bilateral transformation).
- # In those cases, special characters for LIKE operators (e.g. \, %, _)
- # should be escaped on the database side.
+ # the right-hand side of the lookup isn't a raw string (it might be an
+ # expression or the result of a bilateral transformation). In those cases,
+ # special characters for LIKE operators (e.g. \, %, _) should be escaped on
+ # the database side.
#
- # Note: we use str.format() here for readability as '%' is used as a wildcard for
- # the LIKE operator.
+ # Note: we use str.format() here for readability as '%' is used as a
+ # wildcard for the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
_pattern_ops = {
"contains": "'%%' || {} || '%%'",
diff --git a/django/db/backends/oracle/creation.py b/django/db/backends/oracle/creation.py
index 682379930f..efbec14d6e 100644
--- a/django/db/backends/oracle/creation.py
+++ b/django/db/backends/oracle/creation.py
@@ -36,7 +36,8 @@ class DatabaseCreation(BaseDatabaseCreation):
)
except Exception as e:
if "ORA-01543" not in str(e):
- # All errors except "tablespace already exists" cancel tests
+ # All errors except "tablespace already exists" cancel
+ # tests
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
if not autoclobber:
@@ -406,7 +407,8 @@ class DatabaseCreation(BaseDatabaseCreation):
def _test_database_passwd(self):
password = self._test_settings_get("PASSWORD")
if password is None and self._test_user_create():
- # Oracle passwords are limited to 30 chars and can't contain symbols.
+ # Oracle passwords are limited to 30 chars and can't contain
+ # symbols.
password = get_random_string(30)
return password
diff --git a/django/db/backends/oracle/introspection.py b/django/db/backends/oracle/introspection.py
index b0077344ac..12b9b9a097 100644
--- a/django/db/backends/oracle/introspection.py
+++ b/django/db/backends/oracle/introspection.py
@@ -254,8 +254,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
def get_relations(self, cursor, table_name):
"""
- Return a dictionary of {field_name: (field_name_other_table, other_table)}
- representing all foreign keys in the given table.
+ Return a dictionary of {field_name: (field_name_other_table,
+ other_table)} representing all foreign keys in the given table.
"""
table_name = table_name.upper()
cursor.execute(
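
For orientation, a hedged sketch of how this introspection method is typically invoked and the shape it returns (table and column names below are assumptions):

    from django.db import connection

    with connection.cursor() as cursor:
        relations = connection.introspection.get_relations(cursor, "myapp_book")
    # e.g. {"author_id": ("id", "myapp_author")} - each FK column maps to the
    # referenced column and table.
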
diff --git a/django/db/backends/oracle/operations.py b/django/db/backends/oracle/operations.py
index 46a681a520..ce9ed7288d 100644
--- a/django/db/backends/oracle/operations.py
+++ b/django/db/backends/oracle/operations.py
@@ -378,9 +378,9 @@ END;
# We simplify things by making Oracle identifiers always uppercase.
if not name.startswith('"') and not name.endswith('"'):
name = '"%s"' % truncate_name(name, self.max_name_length())
- # Oracle puts the query text into a (query % args) construct, so % signs
- # in names need to be escaped. The '%%' will be collapsed back to '%' at
- # that stage so we aren't really making the name longer here.
+ # Oracle puts the query text into a (query % args) construct, so %
+ # signs in names need to be escaped. The '%%' will be collapsed back to
+ # '%' at that stage so we aren't really making the name longer here.
name = name.replace("%", "%%")
return name.upper()
@@ -589,8 +589,8 @@ END;
def adapt_datetimefield_value(self, value):
"""
- Transform a datetime value to an object compatible with what is expected
- by the backend driver for datetime columns.
+ Transform a datetime value to an object compatible with what is
+ expected by the backend driver for datetime columns.
If naive datetime is passed assumes that is in UTC. Normally Django
models.DateTimeField makes sure that if USE_TZ is True passed datetime
diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py
index f094bfb038..48a048575d 100644
--- a/django/db/backends/oracle/schema.py
+++ b/django/db/backends/oracle/schema.py
@@ -136,7 +136,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
elif new_internal_type == "DateTimeField":
new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
elif new_internal_type == "TimeField":
- # TimeField are stored as TIMESTAMP with a 1900-01-01 date part.
+ # TimeField values are stored as TIMESTAMP with a 1900-01-01
+ # date part.
new_value = "CONCAT('1900-01-01 ', %s)" % new_value
new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
# Transfer values across
diff --git a/django/db/backends/postgresql/base.py b/django/db/backends/postgresql/base.py
index a0b5e4154e..cafa4c7a9c 100644
--- a/django/db/backends/postgresql/base.py
+++ b/django/db/backends/postgresql/base.py
@@ -61,8 +61,8 @@ else:
psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
psycopg2.extras.register_uuid()
- # Register support for inet[] manually so we don't have to handle the Inet()
- # object on load all the time.
+ # Register support for inet[] manually so we don't have to handle the
+ # Inet() object on load all the time.
INETARRAY_OID = 1041
INETARRAY = psycopg2.extensions.new_array_type(
(INETARRAY_OID,),
@@ -71,7 +71,8 @@ else:
)
psycopg2.extensions.register_type(INETARRAY)
-# Some of these import psycopg, so import them after checking if it's installed.
+# Some of these import psycopg, so import them after checking if it's
+# installed.
from .client import DatabaseClient # NOQA isort:skip
from .creation import DatabaseCreation # NOQA isort:skip
from .features import DatabaseFeatures # NOQA isort:skip
@@ -90,9 +91,10 @@ class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "postgresql"
display_name = "PostgreSQL"
# This dictionary maps Field objects to their associated PostgreSQL column
- # types, as strings. Column-type strings can contain format strings; they'll
- # be interpolated against the values of Field.__dict__ before being output.
- # If a column type is set to None, it won't be included in the output.
+ # types, as strings. Column-type strings can contain format strings;
+ # they'll be interpolated against the values of Field.__dict__ before being
+ # output. If a column type is set to None, it won't be included in the
+ # output.
data_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
@@ -150,13 +152,13 @@ class DatabaseWrapper(BaseDatabaseWrapper):
}
# The patterns below are used to generate SQL pattern lookup clauses when
- # the right-hand side of the lookup isn't a raw string (it might be an expression
- # or the result of a bilateral transformation).
- # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
- # escaped on database side.
+ # the right-hand side of the lookup isn't a raw string (it might be an
+ # expression or the result of a bilateral transformation). In those cases,
+ # special characters for LIKE operators (e.g. \, *, _) should be escaped on
+ # database side.
#
- # Note: we use str.format() here for readability as '%' is used as a wildcard for
- # the LIKE operator.
+ # Note: we use str.format() here for readability as '%' is used as a
+ # wildcard for the LIKE operator.
pattern_esc = (
r"REPLACE(REPLACE(REPLACE({}, E'\\', E'\\\\'), E'%%', E'\\%%'), E'_', E'\\_')"
)
diff --git a/django/db/backends/postgresql/introspection.py b/django/db/backends/postgresql/introspection.py
index aaa3d93e60..82013eb191 100644
--- a/django/db/backends/postgresql/introspection.py
+++ b/django/db/backends/postgresql/introspection.py
@@ -154,8 +154,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
def get_relations(self, cursor, table_name):
"""
- Return a dictionary of {field_name: (field_name_other_table, other_table)}
- representing all foreign keys in the given table.
+ Return a dictionary of {field_name: (field_name_other_table,
+ other_table)} representing all foreign keys in the given table.
"""
cursor.execute(
"""
diff --git a/django/db/backends/postgresql/operations.py b/django/db/backends/postgresql/operations.py
index 9db755bb89..a8c073e418 100644
--- a/django/db/backends/postgresql/operations.py
+++ b/django/db/backends/postgresql/operations.py
@@ -221,8 +221,8 @@ class DatabaseOperations(BaseDatabaseOperations):
return ["%s;" % " ".join(sql_parts)]
def sequence_reset_by_name_sql(self, style, sequences):
- # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
- # to reset sequence indices
+ # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL
+ # statements to reset sequence indices
sql = []
for sequence_info in sequences:
table_name = sequence_info["table"]
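
A sketch of how these reset statements are usually obtained in user code (no project-specific assumptions beyond the default database connection):

    from django.core.management.color import no_style
    from django.db import connection

    sequences = connection.introspection.sequence_list()
    statements = connection.ops.sequence_reset_by_name_sql(no_style(), sequences)
    # Each entry resembles 'ALTER SEQUENCE "myapp_entry_id_seq" RESTART WITH 1;'
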
diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py
index f7b65651b3..e378975207 100644
--- a/django/db/backends/sqlite3/base.py
+++ b/django/db/backends/sqlite3/base.py
@@ -60,9 +60,9 @@ Database.register_adapter(datetime.datetime, adapt_datetime)
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "sqlite"
display_name = "SQLite"
- # SQLite doesn't actually support most of these types, but it "does the right
- # thing" given more verbose field definitions, so leave them as is so that
- # schema inspection is more useful.
+ # SQLite doesn't actually support most of these types, but it "does the
+ # right thing" given more verbose field definitions, so leave them as is so
+ # that schema inspection is more useful.
data_types = {
"AutoField": "integer",
"BigAutoField": "integer",
@@ -124,13 +124,13 @@ class DatabaseWrapper(BaseDatabaseWrapper):
}
# The patterns below are used to generate SQL pattern lookup clauses when
- # the right-hand side of the lookup isn't a raw string (it might be an expression
- # or the result of a bilateral transformation).
- # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
- # escaped on database side.
+ # the right-hand side of the lookup isn't a raw string (it might be an
+ # expression or the result of a bilateral transformation). In those cases,
+ # special characters for LIKE operators (e.g. \, *, _) should be escaped on
+ # database side.
#
- # Note: we use str.format() here for readability as '%' is used as a wildcard for
- # the LIKE operator.
+ # Note: we use str.format() here for readability as '%' is used as a
+ # wildcard for the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
"contains": r"LIKE '%%' || {} || '%%' ESCAPE '\'",
diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py
index 10edfd81b9..ac6ae5efbd 100644
--- a/django/db/backends/sqlite3/schema.py
+++ b/django/db/backends/sqlite3/schema.py
@@ -206,9 +206,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
# Construct a new model with the new fields to allow self referential
# primary key to resolve to. This model won't ever be materialized as a
- # table and solely exists for foreign key reference resolution purposes.
- # This wouldn't be required if the schema editor was operating on model
- # states instead of rendered models.
+ # table and solely exists for foreign key reference resolution
+ # purposes. This wouldn't be required if the schema editor was
+ # operating on model states instead of rendered models.
meta_contents = {
"app_label": model._meta.app_label,
"db_table": model._meta.db_table,
@@ -303,10 +303,10 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
if field.many_to_many and field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
elif isinstance(field, CompositePrimaryKey):
- # If a CompositePrimaryKey field was added, the existing primary key field
- # had to be altered too, resulting in an AddField, AlterField migration.
- # The table cannot be re-created on AddField, it would result in a
- # duplicate primary key error.
+ # If a CompositePrimaryKey field was added, the existing primary
+ # key field had to be altered too, resulting in an AddField,
+ # AlterField migration. The table cannot be re-created on AddField,
+ # it would result in a duplicate primary key error.
return
elif (
# Primary keys and unique fields are not supported in ALTER TABLE
@@ -404,7 +404,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
related_models.add(remote_field.through)
if new_field.primary_key:
for many_to_many in opts.many_to_many:
- # Ignore self-relationship since the table was already rebuilt.
+ # Ignore self-relationship since the table was already
+ # rebuilt.
if many_to_many.related_model == model:
continue
if many_to_many.remote_field.through._meta.auto_created:
diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py
index 648bac389c..c319cb2c03 100644
--- a/django/db/migrations/autodetector.py
+++ b/django/db/migrations/autodetector.py
@@ -92,8 +92,9 @@ class MigrationAutodetector:
elif isinstance(obj, COMPILED_REGEX_TYPE):
return RegexObject(obj)
elif isinstance(obj, type):
- # If this is a type that implements 'deconstruct' as an instance method,
- # avoid treating this as being deconstructible itself - see #22951
+ # If this is a type that implements 'deconstruct' as an instance
+ # method, avoid treating this as being deconstructible itself - see
+ # #22951
return obj
elif hasattr(obj, "deconstruct"):
deconstructed = obj.deconstruct()
@@ -754,7 +755,8 @@ class MigrationAutodetector:
beginning=True,
)
- # Don't add operations which modify the database for unmanaged models
+ # Don't add operations which modify the database for unmanaged
+ # models
if not model_state.options.get("managed", True):
continue
@@ -904,7 +906,8 @@ class MigrationAutodetector:
bases=model_state.bases,
managers=model_state.managers,
),
- # Depend on the deletion of any possible non-proxy version of us
+ # Depend on the deletion of any possible non-proxy version of
+ # us
dependencies=dependencies,
)
@@ -980,8 +983,8 @@ class MigrationAutodetector:
],
)
# Finally, remove the model.
- # This depends on both the removal/alteration of all incoming fields
- # and the removal of all its own related fields, and if it's
+ # This depends on both the removal/alteration of all incoming
+ # fields and the removal of all its own related fields, and if it's
# a through model the field that references it.
dependencies = []
relations = self.from_state.relations
@@ -1219,8 +1222,8 @@ class MigrationAutodetector:
name=field_name,
),
# We might need to depend on the removal of an
- # order_with_respect_to or index/constraint/unique_together operation;
- # this is safely ignored if there isn't one
+ # order_with_respect_to or index/constraint/unique_together
+ # operation; this is safely ignored if there isn't one
dependencies=[
OperationDependency(
app_label,
@@ -1265,8 +1268,8 @@ class MigrationAutodetector:
field_name
)
dependencies = []
- # Implement any model renames on relations; these are handled by RenameModel
- # so we need to exclude them from the comparison
+ # Implement any model renames on relations; these are handled by
+ # RenameModel so we need to exclude them from the comparison
if hasattr(new_field, "remote_field") and getattr(
new_field.remote_field, "model", None
):
@@ -1287,7 +1290,8 @@ class MigrationAutodetector:
new_field.remote_field.field_name = (
old_field.remote_field.field_name
)
- # Handle ForeignObjects which can have multiple from_fields/to_fields.
+ # Handle ForeignObjects which can have multiple
+ # from_fields/to_fields.
from_fields = getattr(new_field, "from_fields", None)
if from_fields:
from_rename_key = (app_label, model_name)
@@ -1718,7 +1722,8 @@ class MigrationAutodetector:
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
- # We run the old version through the field renames to account for those
+ # We run the old version through the field renames to account for
+ # those
old_value = old_model_state.options.get(option_name)
old_value = (
{
diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py
index 61b2b54f6c..1ad7d0c18c 100644
--- a/django/db/migrations/executor.py
+++ b/django/db/migrations/executor.py
@@ -21,7 +21,8 @@ class MigrationExecutor:
def migration_plan(self, targets, clean_start=False):
"""
- Given a set of targets, return a list of (Migration instance, backwards?).
+ Given a set of targets, return a list of (Migration instance,
+ backwards?).
"""
plan = []
if clean_start:
@@ -29,7 +30,8 @@ class MigrationExecutor:
else:
applied = dict(self.loader.applied_migrations)
for target in targets:
- # If the target is (app_label, None), that means unmigrate everything
+ # If the target is (app_label, None), that means unmigrate
+ # everything
if target[1] is None:
for root in self.loader.graph.root_nodes():
if root[0] == target[0]:
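
A hedged usage sketch of the plan format described above (the app label is an assumption): each target is an (app_label, migration_name) node, and (app_label, None) asks for the app to be fully unmigrated.

    from django.db import connection
    from django.db.migrations.executor import MigrationExecutor

    executor = MigrationExecutor(connection)
    # Plan to unmigrate the hypothetical "library" app entirely.
    plan = executor.migration_plan([("library", None)])
    for migration, backwards in plan:
        print(migration, "backwards" if backwards else "forwards")
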
diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py
index dd845c13e8..ff5ecbc8b4 100644
--- a/django/db/migrations/graph.py
+++ b/django/db/migrations/graph.py
@@ -71,16 +71,16 @@ class MigrationGraph:
branch merges can be detected and resolved.
Migrations files can be marked as replacing another set of migrations -
- this is to support the "squash" feature. The graph handler isn't responsible
- for these; instead, the code to load them in here should examine the
- migration files and if the replaced migrations are all either unapplied
- or not present, it should ignore the replaced ones, load in just the
- replacing migration, and repoint any dependencies that pointed to the
+ this is to support the "squash" feature. The graph handler isn't
+ responsible for these; instead, the code to load them in here should
+ examine the migration files and if the replaced migrations are all either
+ unapplied or not present, it should ignore the replaced ones, load in just
+ the replacing migration, and repoint any dependencies that pointed to the
replaced migrations to point to the replacing one.
- A node should be a tuple: (app_path, migration_name). The tree special-cases
- things within an app - namely, root nodes and leaf nodes ignore dependencies
- to other apps.
+ A node should be a tuple: (app_path, migration_name). The tree
+ special-cases things within an app - namely, root nodes and leaf nodes
+ ignore dependencies to other apps.
"""
def __init__(self):
@@ -145,7 +145,8 @@ class MigrationGraph:
child.parents.remove(replaced_node)
# We don't want to create dependencies between the replaced
# node and the replacement node as this would lead to
- # self-referencing on the replacement node at a later iteration.
+ # self-referencing on the replacement node at a later
+ # iteration.
if child.key not in replaced:
replacement_node.add_child(child)
child.add_parent(replacement_node)
@@ -315,7 +316,8 @@ class MigrationGraph:
"""
Given a migration node or nodes, return a complete ProjectState for it.
If at_end is False, return the state before the migration has run.
- If nodes is not provided, return the overall most current project state.
+ If nodes is not provided, return the overall most current project
+ state.
"""
if nodes is None:
nodes = list(self.leaf_nodes())
diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py
index 207be657b4..66944c7ab7 100644
--- a/django/db/migrations/loader.py
+++ b/django/db/migrations/loader.py
@@ -34,9 +34,9 @@ class MigrationLoader:
Some migrations will be marked as "replacing" another set of migrations.
These are loaded into a separate set of migrations away from the main ones.
If all the migrations they replace are either unapplied or missing from
- disk, then they are injected into the main set, replacing the named migrations.
- Any dependency pointers to the replaced migrations are re-pointed to the
- new migration.
+ disk, then they are injected into the main set, replacing the named
+ migrations. Any dependency pointers to the replaced migrations are
+ re-pointed to the new migration.
This does mean that this class MUST also talk to the database as well as
to disk, but this is probably fine. We're already not just operating
@@ -145,7 +145,8 @@ class MigrationLoader:
def get_migration_by_prefix(self, app_label, name_prefix):
"""
- Return the migration(s) which match the given app label and name_prefix.
+ Return the migration(s) which match the given app label and
+ name_prefix.
"""
# Do the search
results = []
@@ -274,7 +275,8 @@ class MigrationLoader:
"""
Build a migration dependency graph using both the disk and database.
You'll need to rebuild the graph if you apply migrations. This isn't
- usually a problem as generally migration stuff runs in a one-shot process.
+ usually a problem as generally migration stuff runs in a one-shot
+ process.
"""
# Load disk data
self.load_disk()
@@ -285,7 +287,8 @@ class MigrationLoader:
recorder = MigrationRecorder(self.connection)
self.applied_migrations = recorder.applied_migrations()
# To start, populate the migration graph with nodes for ALL migrations
- # and their dependencies. Also make note of replacing migrations at this step.
+ # and their dependencies. Also make note of replacing migrations at
+ # this step.
self.graph = MigrationGraph()
self.replacements = {}
for key, migration in self.disk_migrations.items():
@@ -296,7 +299,8 @@ class MigrationLoader:
for key, migration in self.disk_migrations.items():
# Internal (same app) dependencies.
self.add_internal_dependencies(key, migration)
- # Add external dependencies now that the internal ones have been resolved.
+ # Add external dependencies now that the internal ones have been
+ # resolved.
for key, migration in self.disk_migrations.items():
self.add_external_dependencies(key, migration)
# Carry out replacements where possible and if enabled.
@@ -310,8 +314,8 @@ class MigrationLoader:
except NodeNotFoundError as exc:
# Check if the missing node could have been replaced by any squash
# migration but wasn't because the squash migration was partially
- # applied before. In that case raise a more understandable exception
- # (#23556).
+ # applied before. In that case raise a more understandable
+ # exception (#23556).
# Get reverse replacements.
reverse_replacements = {}
for key, migration in self.replacements.items():
diff --git a/django/db/migrations/operations/special.py b/django/db/migrations/operations/special.py
index 196f24fcd6..0700023325 100644
--- a/django/db/migrations/operations/special.py
+++ b/django/db/migrations/operations/special.py
@@ -31,7 +31,8 @@ class SeparateDatabaseAndState(Operation):
state_operation.state_forwards(app_label, state)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
- # We calculate state separately in here since our state functions aren't useful
+ # We calculate state separately in here since our state functions
+ # aren't useful
for database_operation in self.database_operations:
to_state = from_state.clone()
database_operation.state_forwards(app_label, to_state)
@@ -41,7 +42,8 @@ class SeparateDatabaseAndState(Operation):
from_state = to_state
def database_backwards(self, app_label, schema_editor, from_state, to_state):
- # We calculate state separately in here since our state functions aren't useful
+ # We calculate state separately in here since our state functions
+ # aren't useful
to_states = {}
for dbop in self.database_operations:
to_states[dbop] = to_state
@@ -189,10 +191,11 @@ class RunPython(Operation):
if router.allow_migrate(
schema_editor.connection.alias, app_label, **self.hints
):
- # We now execute the Python code in a context that contains a 'models'
- # object, representing the versioned models as an app registry.
- # We could try to override the global cache, but then people will still
- # use direct imports, so we go with a documentation approach instead.
+ # We now execute the Python code in a context that contains a
+ # 'models' object, representing the versioned models as an app
+ # registry. We could try to override the global cache, but then
+ # people will still use direct imports, so we go with a
+ # documentation approach instead.
self.code(from_state.apps, schema_editor)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
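
The documentation approach referred to above looks roughly like the following in user code (app, model, and field names are assumptions): the callable receives the versioned app registry, so historical models are fetched with apps.get_model() rather than imported directly.

    from django.db import migrations


    def forwards(apps, schema_editor):
        # Historical model state, not the current model class.
        Book = apps.get_model("library", "Book")
        Book.objects.filter(subtitle=None).update(subtitle="")


    class Migration(migrations.Migration):
        dependencies = [("library", "0002_previous")]
        operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]
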
diff --git a/django/db/migrations/serializer.py b/django/db/migrations/serializer.py
index cfc2657261..8366fb0a42 100644
--- a/django/db/migrations/serializer.py
+++ b/django/db/migrations/serializer.py
@@ -250,7 +250,8 @@ class OperationSerializer(BaseSerializer):
from django.db.migrations.writer import OperationWriter
string, imports = OperationWriter(self.value, indentation=0).serialize()
- # Nested operation, trailing comma is handled in upper OperationWriter._write()
+ # Nested operation, trailing comma is handled in upper
+ # OperationWriter._write()
return string.rstrip(","), imports
diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py
index 8e6dd5538f..802aeb0b5e 100644
--- a/django/db/migrations/state.py
+++ b/django/db/migrations/state.py
@@ -73,9 +73,10 @@ def get_related_models_recursive(model):
Relationships are either defined by explicit relational fields, like
ForeignKey, ManyToManyField or OneToOneField, or by inheriting from another
- model (a superclass is related to its subclasses, but not vice versa). Note,
- however, that a model inheriting from a concrete model is also related to
- its superclass through the implicit *_ptr OneToOneField on the subclass.
+ model (a superclass is related to its subclasses, but not vice versa).
+ Note, however, that a model inheriting from a concrete model is also
+ related to its superclass through the implicit *_ptr OneToOneField on the
+ subclass.
"""
seen = set()
queue = _get_related_models(model)
diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py
index e2befd4d4e..c1101b5bb0 100644
--- a/django/db/migrations/writer.py
+++ b/django/db/migrations/writer.py
@@ -160,8 +160,8 @@ class MigrationWriter:
"\n".join(sorted(dependencies)) + "\n" if dependencies else ""
)
- # Format imports nicely, swapping imports of functions from migration files
- # for comments
+ # Format imports nicely, swapping imports of functions from migration
+ # files for comments
migration_imports = set()
for line in list(imports):
if re.match(r"^import (.*)\.\d+[^\s]*$", line):
diff --git a/django/db/models/aggregates.py b/django/db/models/aggregates.py
index 444d72addb..1cf82416cb 100644
--- a/django/db/models/aggregates.py
+++ b/django/db/models/aggregates.py
@@ -353,10 +353,10 @@ class StringAgg(Aggregate):
extra_context["template"] = template
c = self.copy()
- # The creation of the delimiter SQL and the ordering of the parameters must be
- # handled explicitly, as MySQL puts the delimiter at the end of the aggregate
- # using the `SEPARATOR` declaration (rather than treating as an expression like
- # other database backends).
+ # The creation of the delimiter SQL and the ordering of the parameters
+ # must be handled explicitly, as MySQL puts the delimiter at the end of
+ # the aggregate using the `SEPARATOR` declaration (rather than treating
+ # it as an expression like other database backends do).
delimiter_params = []
if c.delimiter:
delimiter_sql, delimiter_params = compiler.compile(c.delimiter)
diff --git a/django/db/models/base.py b/django/db/models/base.py
index 901743147d..7c20319da6 100644
--- a/django/db/models/base.py
+++ b/django/db/models/base.py
@@ -493,10 +493,10 @@ class Model(AltersData, metaclass=ModelBase):
# Set up the storage for instance state
self._state = ModelState()
- # There is a rather weird disparity here; if kwargs, it's set, then args
- # overrides it. It should be one or the other; don't duplicate the work
- # The reason for the kwargs check is that standard iterator passes in by
- # args, and instantiation for iteration is 33% faster.
+ # There is a rather weird disparity here; if kwargs, it's set, then
+ # args overrides it. It should be one or the other; don't duplicate the
+ # work. The reason for the kwargs check is that standard iterator passes
+ # in by args, and instantiation for iteration is 33% faster.
if len(args) > len(opts.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
@@ -504,9 +504,9 @@ class Model(AltersData, metaclass=ModelBase):
if not kwargs:
fields_iter = iter(opts.concrete_fields)
# The ordering of the zip calls matter - zip throws StopIteration
- # when an iter throws it. So if the first iter throws it, the second
- # is *not* consumed. We rely on this, so don't change the order
- # without changing the logic.
+ # when an iter throws it. So if the first iter throws it, the
+ # second is *not* consumed. We rely on this, so don't change the
+ # order without changing the logic.
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
@@ -540,7 +540,8 @@ class Model(AltersData, metaclass=ModelBase):
is_related_object = True
except KeyError:
try:
- # Object instance wasn't passed in -- must be an ID.
+ # Object instance wasn't passed in -- must be an
+ # ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
@@ -1079,7 +1080,8 @@ class Model(AltersData, metaclass=ModelBase):
and all(f.has_default() or f.has_db_default() for f in meta.pk_fields)
):
force_insert = True
- # If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
+ # If possible, try an UPDATE. If that doesn't update anything, do an
+ # INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [
@@ -1142,21 +1144,22 @@ class Model(AltersData, metaclass=ModelBase):
if not values:
# We can end up here when saving a model in inheritance chain where
# update_fields doesn't target any field in current model. In that
- # case we just say the update succeeded. Another case ending up here
- # is a model with just PK - in that case check that the PK still
- # exists.
+ # case we just say the update succeeded. Another case ending up
+ # here is a model with just PK - in that case check that the PK
+ # still exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
return (
filtered.exists()
and
- # It may happen that the object is deleted from the DB right after
- # this check, causing the subsequent UPDATE to return zero matching
- # rows. The same result can occur in some rare cases when the
- # database returns zero despite the UPDATE being executed
- # successfully (a row is matched and updated). In order to
- # distinguish these two cases, the object's existence in the
- # database is again checked for if the UPDATE query returns 0.
+ # It may happen that the object is deleted from the DB right
+ # after this check, causing the subsequent UPDATE to return
+ # zero matching rows. The same result can occur in some rare
+ # cases when the database returns zero despite the UPDATE being
+ # executed successfully (a row is matched and updated). In
+ # order to distinguish these two cases, the object's existence
+ # in the database is again checked for if the UPDATE query
+ # returns 0.
(filtered._update(values) > 0 or filtered.exists())
)
return filtered._update(values) > 0
@@ -1347,7 +1350,8 @@ class Model(AltersData, metaclass=ModelBase):
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
- have a special-case association with the field defined by NON_FIELD_ERRORS.
+ have a special-case association with the field defined by
+ NON_FIELD_ERRORS.
"""
pass
@@ -1878,7 +1882,9 @@ class Model(AltersData, metaclass=ModelBase):
@classmethod
def _check_m2m_through_same_relationship(cls):
- """Check if no relationship model is used by more than one m2m field."""
+ """
+ Check if no relationship model is used by more than one m2m field.
+ """
errors = []
seen_intermediary_signatures = []
@@ -2003,7 +2009,8 @@ class Model(AltersData, metaclass=ModelBase):
@classmethod
def _check_column_name_clashes(cls):
- # Store a list of column names which have already been used by other fields.
+ # Store a list of column names which have already been used by other
+ # fields.
used_column_names = []
errors = []
diff --git a/django/db/models/constraints.py b/django/db/models/constraints.py
index ae2709abb8..73ab23bdfa 100644
--- a/django/db/models/constraints.py
+++ b/django/db/models/constraints.py
@@ -593,8 +593,8 @@ class UniqueConstraint(BaseConstraint):
].features.interprets_empty_strings_as_nulls
)
):
- # A composite constraint containing NULL value cannot cause
- # a violation since NULL != NULL in SQL.
+ # A composite constraint containing a NULL value cannot
+ # cause a violation since NULL != NULL in SQL.
return
lookup_kwargs[field.name] = lookup_value
lookup_args = []
@@ -646,8 +646,8 @@ class UniqueConstraint(BaseConstraint):
and self.violation_error_message
== self.default_violation_error_message
):
- # When fields are defined, use the unique_error_message() as
- # a default for backward compatibility.
+ # When fields are defined, use the unique_error_message()
+ # as a default for backward compatibility.
validation_error_message = instance.unique_error_message(
model, self.fields
)
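
A hedged model-level example of the validation being discussed (model and field names are assumptions): with a multi-column UniqueConstraint, a row in which one of the columns is NULL never triggers a violation, because NULL != NULL in SQL.

    from django.db import models


    class Booking(models.Model):
        room = models.CharField(max_length=32)
        date = models.DateField(null=True)

        class Meta:
            constraints = [
                models.UniqueConstraint(fields=["room", "date"], name="unique_room_date"),
            ]
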
diff --git a/django/db/models/deletion.py b/django/db/models/deletion.py
index 9221364ff4..b1939f8b35 100644
--- a/django/db/models/deletion.py
+++ b/django/db/models/deletion.py
@@ -82,8 +82,9 @@ def DO_NOTHING(collector, field, sub_objs, using):
def get_candidate_relations_to_delete(opts):
- # The candidate relations are the ones that come from N-1 and 1-1 relations.
- # N-N (i.e., many-to-many) relations aren't candidates for deletion.
+ # The candidate relations are the ones that come from N-1 and 1-1
+ # relations. N-N (i.e., many-to-many) relations aren't candidates for
+ # deletion.
return (
f
for f in opts.get_fields(include_hidden=True)
@@ -434,8 +435,8 @@ class Collector:
self.data[model] = sorted(instances, key=attrgetter("pk"))
# if possible, bring the models in an order suitable for databases that
- # don't support transactions or cannot defer constraint checks until the
- # end of a transaction.
+ # don't support transactions or cannot defer constraint checks until
+ # the end of a transaction.
self.sort()
# number of objects deleted for each model label
deleted_counter = Counter()
diff --git a/django/db/models/expressions.py b/django/db/models/expressions.py
index bf89a4f561..012a7c346b 100644
--- a/django/db/models/expressions.py
+++ b/django/db/models/expressions.py
@@ -289,7 +289,8 @@ class BaseExpression:
in this query
* reuse: a set of reusable joins for multijoins
* summarize: a terminal aggregate clause
- * for_save: whether this expression about to be used in a save or update
+ * for_save: whether this expression is about to be used in a save or
+ update
Return: an Expression to be added to the query.
"""
@@ -349,9 +350,9 @@ class BaseExpression:
As a guess, if the output fields of all source fields match then simply
infer the same type here.
- If a source's output field resolves to None, exclude it from this check.
- If all sources are None, then an error is raised higher up the stack in
- the output_field property.
+ If a source's output field resolves to None, exclude it from this
+ check. If all sources are None, then an error is raised higher up the
+ stack in the output_field property.
"""
# This guess is mostly a bad idea, but there is quite a lot of code
# (especially 3rd party Func subclasses) that depend on it, we'd need a
@@ -500,7 +501,8 @@ class BaseExpression:
return sql, params
def get_expression_for_validation(self):
- # Ignore expressions that cannot be used during a constraint validation.
+ # Ignore expressions that cannot be used during a constraint
+ # validation.
if not getattr(self, "constraint_validation_compatible", True):
try:
(expression,) = self.get_source_expressions()
@@ -1264,7 +1266,8 @@ class Star(Expression):
class DatabaseDefault(Expression):
"""
- Expression to use DEFAULT keyword during insert otherwise the underlying expression.
+ Expression that renders as the DEFAULT keyword during an insert and
+ otherwise falls back to the underlying expression.
"""
def __init__(self, expression, output_field=None):
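
For context, a hedged sketch of where this expression shows up from user code (model and field names are assumptions): fields declared with db_default insert the database-side default unless an explicit value is given.

    from django.db import models
    from django.db.models.functions import Now


    class Order(models.Model):
        quantity = models.IntegerField(db_default=1)
        placed_at = models.DateTimeField(db_default=Now())
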
@@ -1625,7 +1628,8 @@ class When(Expression):
):
c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
if for_save and c.condition is not None:
- # Resolve condition with for_save=False, since it's used as a filter.
+ # Resolve condition with for_save=False, since it's used as a
+ # filter.
c.condition = self.condition.resolve_expression(
query, allow_joins, reuse, summarize, for_save=False
)
diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py
index 69289627f0..e7add282a6 100644
--- a/django/db/models/fields/__init__.py
+++ b/django/db/models/fields/__init__.py
@@ -997,7 +997,8 @@ class Field(RegisterLookupMixin):
def get_db_prep_value(self, value, connection, prepared=False):
"""
- Return field's value prepared for interacting with the database backend.
+ Return field's value prepared for interacting with the database
+ backend.
Used by the default implementations of get_db_prep_save().
"""
@@ -1927,8 +1928,8 @@ class EmailField(CharField):
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
- # We do not exclude max_length if it matches default as we want to change
- # the default in future.
+ # We do not exclude max_length if it matches default as we want to
+ # change the default in future.
return name, path, args, kwargs
def formfield(self, **kwargs):
diff --git a/django/db/models/fields/files.py b/django/db/models/fields/files.py
index 8f807b1156..5216ff565f 100644
--- a/django/db/models/fields/files.py
+++ b/django/db/models/fields/files.py
@@ -176,11 +176,11 @@ class FileDescriptor(DeferredAttribute):
# instance.file needs to ultimately return some instance of `File`,
# probably a subclass. Additionally, this returned object needs to have
# the FieldFile API so that users can easily do things like
- # instance.file.path and have that delegated to the file storage engine.
- # Easy enough if we're strict about assignment in __set__, but if you
- # peek below you can see that we're not. So depending on the current
- # value of the field we have to dynamically construct some sort of
- # "thing" to return.
+ # instance.file.path and have that delegated to the file storage
+ # engine. Easy enough if we're strict about assignment in __set__, but
+ # if you peek below you can see that we're not. So depending on the
+ # current value of the field we have to dynamically construct some sort
+ # of "thing" to return.
# The instance dict contains whatever was originally assigned
# in __set__.
@@ -204,8 +204,8 @@ class FileDescriptor(DeferredAttribute):
instance.__dict__[self.field.attname] = attr
# Other types of files may be assigned as well, but they need to have
- # the FieldFile interface added to them. Thus, we wrap any other type of
- # File inside a FieldFile (well, the field's attr_class, which is
+ # the FieldFile interface added to them. Thus, we wrap any other type
+ # of File inside a FieldFile (well, the field's attr_class, which is
# usually FieldFile).
elif isinstance(file, File) and not isinstance(file, FieldFile):
file_copy = self.field.attr_class(instance, self.field, file.name)
@@ -215,7 +215,8 @@ class FileDescriptor(DeferredAttribute):
# Finally, because of the (some would say boneheaded) way pickle works,
# the underlying FieldFile might not actually itself have an associated
- # file. So we need to reset the details of the FieldFile in those cases.
+ # file. So we need to reset the details of the FieldFile in those
+ # cases.
elif isinstance(file, FieldFile) and not hasattr(file, "field"):
file.instance = instance
file.field = self.field
diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py
index bad71a5fd6..a59dcac68c 100644
--- a/django/db/models/fields/related.py
+++ b/django/db/models/fields/related.py
@@ -663,8 +663,8 @@ class ForeignObject(RelatedField):
frozenset(uc.fields) <= foreign_fields
for uc in remote_opts.total_unique_constraints
)
- # If the model defines a composite primary key and the foreign key
- # refers to it, the target is unique.
+ # If the model defines a composite primary key and the foreign
+ # key refers to it, the target is unique.
or (
frozenset(field.name for field in remote_opts.pk_fields)
== foreign_fields
@@ -746,8 +746,8 @@ class ForeignObject(RelatedField):
kwargs["to"] = self.remote_field.model.lower()
else:
kwargs["to"] = self.remote_field.model._meta.label_lower
- # If swappable is True, then see if we're actually pointing to the target
- # of a swap.
+ # If swappable is True, then see if we're actually pointing to the
+ # target of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error
@@ -1825,8 +1825,8 @@ class ManyToManyField(RelatedField):
kwargs["through"] = self.remote_field.through._meta.label
if through_fields := getattr(self.remote_field, "through_fields", None):
kwargs["through_fields"] = through_fields
- # If swappable is True, then see if we're actually pointing to the target
- # of a swap.
+ # If swappable is True, then see if we're actually pointing to the
+ # target of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error.
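
The swappable handling above corresponds to the usual user-code pattern of pointing relations at the setting rather than the concrete model (the model below is an assumption), so that deconstruct() can keep a settings reference:

    from django.conf import settings
    from django.db import models


    class Bookmark(models.Model):
        owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
        url = models.URLField()
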
diff --git a/django/db/models/fields/related_descriptors.py b/django/db/models/fields/related_descriptors.py
index 8da7aaef91..3e2150e0f6 100644
--- a/django/db/models/fields/related_descriptors.py
+++ b/django/db/models/fields/related_descriptors.py
@@ -309,17 +309,17 @@ class ForwardManyToOneDescriptor:
)
remote_field = self.field.remote_field
- # If we're setting the value of a OneToOneField to None, we need to clear
- # out the cache on any old related object. Otherwise, deleting the
- # previously-related object will also cause this object to be deleted,
- # which is wrong.
+ # If we're setting the value of a OneToOneField to None, we need to
+ # clear out the cache on any old related object. Otherwise, deleting
+ # the previously-related object will also cause this object to be
+ # deleted, which is wrong.
if value is None:
- # Look up the previously-related object, which may still be available
- # since we've not yet cleared out the related field.
- # Use the cache directly, instead of the accessor; if we haven't
+ # Look up the previously-related object, which may still be
+ # available since we've not yet cleared out the related field. Use
+ # the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
- # the object to invalidate the accessor cache, so there's no
- # need to populate the cache just to expire it again.
+ # the object to invalidate the accessor cache, so there's no need
+ # to populate the cache just to expire it again.
related = self.field.get_cached_value(instance, default=None)
# If we've got an old related object, we need to clear out its
@@ -357,7 +357,8 @@ class ForwardManyToOneDescriptor:
class ForwardOneToOneDescriptor(ForwardManyToOneDescriptor):
"""
- Accessor to the related object on the forward side of a one-to-one relation.
+ Accessor to the related object on the forward side of a one-to-one
+ relation.
In the example::
@@ -531,7 +532,8 @@ class ReverseOneToOneDescriptor:
- ``self`` is the descriptor managing the ``restaurant`` attribute
- ``instance`` is the ``place`` instance
- - ``value`` is the ``restaurant`` instance on the right of the equal sign
+ - ``value`` is the ``restaurant`` instance on the right of the equal
+ sign
Keep in mind that ``Restaurant`` holds the foreign key to ``Place``.
"""
@@ -586,12 +588,13 @@ class ReverseOneToOneDescriptor:
for index, field in enumerate(self.related.field.local_related_fields):
setattr(value, field.attname, related_pk[index])
- # Set the related instance cache used by __get__ to avoid an SQL query
- # when accessing the attribute we just set.
+ # Set the related instance cache used by __get__ to avoid an SQL
+ # query when accessing the attribute we just set.
self.related.set_cached_value(instance, value)
- # Set the forward accessor cache on the related object to the current
- # instance to avoid an extra SQL query if it's accessed later on.
+ # Set the forward accessor cache on the related object to the
+ # current instance to avoid an extra SQL query if it's accessed
+ # later on.
self.related.field.set_cached_value(value, instance)
def __reduce__(self):
@@ -1076,8 +1079,8 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
def _build_remove_filters(self, removed_vals):
filters = Q.create([(self.source_field_name, self.related_val)])
- # No need to add a subquery condition if removed_vals is a QuerySet without
- # filters.
+ # No need to add a subquery condition if removed_vals is a QuerySet
+ # without filters.
removed_vals_filters = (
not isinstance(removed_vals, QuerySet) or removed_vals._has_filters()
)
@@ -1145,8 +1148,8 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
# M2M: need to annotate the query in order to get the primary model
# that the secondary model was actually related to. We know that
- # there will already be a join on the join table, so we can just add
- # the select.
+ # there will already be a join on the join table, so we can just
+ # add the select.
# For non-autocreated 'through' models, can't assume we are
# dealing with PK values.
@@ -1475,10 +1478,10 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
def _add_items(
self, source_field_name, target_field_name, *objs, through_defaults=None
):
- # source_field_name: the PK fieldname in join table for the source object
- # target_field_name: the PK fieldname in join table for the target object
- # *objs - objects to add. Either object instances, or primary keys
- # of object instances.
+ # source_field_name: the PK fieldname in the join table for the
+ #   source object.
+ # target_field_name: the PK fieldname in the join table for the
+ #   target object.
+ # *objs: objects to add, either object instances or primary keys of
+ #   object instances.
if not objs:
return
@@ -1544,10 +1547,10 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
)
def _remove_items(self, source_field_name, target_field_name, *objs):
- # source_field_name: the PK colname in join table for the source object
- # target_field_name: the PK colname in join table for the target object
- # *objs - objects to remove. Either object instances, or primary
- # keys of object instances.
+ # source_field_name: the PK colname in the join table for the
+ #   source object.
+ # target_field_name: the PK colname in the join table for the
+ #   target object.
+ # *objs: objects to remove, either object instances or primary keys
+ #   of object instances.
if not objs:
return
diff --git a/django/db/models/fields/related_lookups.py b/django/db/models/fields/related_lookups.py
index 9fc7db7c34..639c29d7ba 100644
--- a/django/db/models/fields/related_lookups.py
+++ b/django/db/models/fields/related_lookups.py
@@ -28,8 +28,9 @@ def get_normalized_value(value, lhs):
try:
value_list.append(getattr(value, source.attname))
except AttributeError:
- # A case like Restaurant.objects.filter(place=restaurant_instance),
- # where place is a OneToOneField and the primary key of Restaurant.
+ # A case like
+ # Restaurant.objects.filter(place=restaurant_instance), where
+ # place is a OneToOneField and the primary key of Restaurant.
pk = value.pk
return pk if isinstance(pk, tuple) else (pk,)
return tuple(value_list)
@@ -101,10 +102,10 @@ class RelatedLookupMixin:
):
# If we get here, we are dealing with single-column relations.
self.rhs = get_normalized_value(self.rhs, self.lhs)[0]
- # We need to run the related field's get_prep_value(). Consider case
- # ForeignKey to IntegerField given value 'abc'. The ForeignKey itself
- # doesn't have validation for non-integers, so we must run validation
- # using the target field.
+ # We need to run the related field's get_prep_value(). Consider
+ # case ForeignKey to IntegerField given value 'abc'. The ForeignKey
+ # itself doesn't have validation for non-integers, so we must run
+ # validation using the target field.
if self.prepare_rhs and hasattr(self.lhs.output_field, "path_infos"):
# Get the target field. We can safely assume there is only one
# as we don't get to the direct value branch otherwise.
diff --git a/django/db/models/fields/tuple_lookups.py b/django/db/models/fields/tuple_lookups.py
index 62818a37c4..b861bbe9cc 100644
--- a/django/db/models/fields/tuple_lookups.py
+++ b/django/db/models/fields/tuple_lookups.py
@@ -370,7 +370,8 @@ class TupleIn(TupleLookupMixin, In):
return super(TupleLookupMixin, self).as_sql(compiler, connection)
# e.g.: (a, b, c) in [(x1, y1, z1), (x2, y2, z2)] as SQL:
- # WHERE (a = x1 AND b = y1 AND c = z1) OR (a = x2 AND b = y2 AND c = z2)
+ # WHERE (a = x1 AND b = y1 AND c = z1) OR
+ #       (a = x2 AND b = y2 AND c = z2)
root = WhereNode([], connector=OR)
lhs = self.lhs
diff --git a/django/db/models/functions/datetime.py b/django/db/models/functions/datetime.py
index 361e4ce385..b536690c8a 100644
--- a/django/db/models/functions/datetime.py
+++ b/django/db/models/functions/datetime.py
@@ -96,7 +96,8 @@ class Extract(TimezoneMixin, Transform):
"Extract input expression must be DateField, DateTimeField, "
"TimeField, or DurationField."
)
- # Passing dates to functions expecting datetimes is most likely a mistake.
+ # Passing dates to functions expecting datetimes is most likely a
+ # mistake.
if type(field) is DateField and copy.lookup_name in (
"hour",
"minute",
diff --git a/django/db/models/functions/json.py b/django/db/models/functions/json.py
index 3a4c9c81b3..fee7dd05f4 100644
--- a/django/db/models/functions/json.py
+++ b/django/db/models/functions/json.py
@@ -98,8 +98,8 @@ class JSONObject(Func):
def as_postgresql(self, compiler, connection, **extra_context):
# Casting keys to text is only required when using JSONB_BUILD_OBJECT
- # or when using JSON_OBJECT on PostgreSQL 16+ with server-side bindings.
- # This is done in all cases for consistency.
+ # or when using JSON_OBJECT on PostgreSQL 16+ with server-side
+ # bindings. This is done in all cases for consistency.
copy = self.copy()
copy.set_source_expressions(
[
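
Usage sketch for orientation (model and field names are assumptions): JSONObject takes keyword arguments whose values are expressions, and the keyword names become the JSON object's keys.

    from django.db.models import F
    from django.db.models.functions import JSONObject
    from myapp.models import Author  # assumed model

    Author.objects.annotate(info=JSONObject(name=F("name"), age=F("age")))
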
diff --git a/django/db/models/functions/text.py b/django/db/models/functions/text.py
index 9c48659bf9..28660c5e66 100644
--- a/django/db/models/functions/text.py
+++ b/django/db/models/functions/text.py
@@ -110,7 +110,8 @@ class ConcatPair(Func):
)
def coalesce(self):
- # null on either side results in null for expression, wrap with coalesce
+ # null on either side results in null for expression, wrap with
+ # coalesce
c = self.copy()
c.set_source_expressions(
[
diff --git a/django/db/models/options.py b/django/db/models/options.py
index 296309236f..0e229dea3a 100644
--- a/django/db/models/options.py
+++ b/django/db/models/options.py
@@ -348,9 +348,10 @@ class Options:
# being referenced, because there will be new relationships in the
# cache. Otherwise, expire the cache of references *to* this field.
# The mechanism for getting at the related model is slightly odd -
- # ideally, we'd just ask for field.related_model. However, related_model
- # is a cached property, and all the models haven't been loaded yet, so
- # we need to make sure we don't cache a string reference.
+ # ideally, we'd just ask for field.related_model. However,
+ # related_model is a cached property, and all the models haven't been
+ # loaded yet, so we need to make sure we don't cache a string
+ # reference.
if (
field.is_relation
and hasattr(field.remote_field, "model")
@@ -427,8 +428,8 @@ class Options:
except ValueError:
# setting not in the format app_label.model_name
# raising ImproperlyConfigured here causes problems with
- # test cleanup code - instead it is raised in get_user_model
- # or as part of validation.
+ # test cleanup code - instead it is raised in
+ # get_user_model or as part of validation.
return swapped_for
if (
@@ -534,10 +535,10 @@ class Options:
# For legacy reasons, the fields property should only contain forward
# fields that are not private or with a m2m cardinality. Therefore we
# pass these three filters as filters to the generator.
- # The third filter is a longwinded way of checking f.related_model - we don't
- # use that property directly because related_model is a cached property,
- # and all the models may not have been loaded yet; we don't want to cache
- # the string reference to the related_model.
+ # The third filter is a longwinded way of checking f.related_model - we
+ # don't use that property directly because related_model is a cached
+ # property, and all the models may not have been loaded yet; we don't
+ # want to cache the string reference to the related_model.
def is_not_an_m2m_field(f):
return not (f.is_relation and f.many_to_many)
@@ -707,7 +708,8 @@ class Options:
def all_parents(self):
"""
Return all the ancestors of this model as a tuple ordered by MRO.
- Useful for determining if something is an ancestor, regardless of lineage.
+ Useful for determining if something is an ancestor, regardless of
+ lineage.
"""
result = OrderedSet(self.parents)
for parent in self.parents:
@@ -800,8 +802,8 @@ class Options:
"""
This method is used by each model to find its reverse objects. As this
method is very expensive and is accessed frequently (it looks up every
- field in a model, in every app), it is computed on first access and then
- is set as a property on every model.
+ field in a model, in every app), it is computed on first access and
+ then is set as a property on every model.
"""
related_objects_graph = defaultdict(list)
diff --git a/django/db/models/query.py b/django/db/models/query.py
index 8163b5b973..3e3753ee5a 100644
--- a/django/db/models/query.py
+++ b/django/db/models/query.py
@@ -755,8 +755,9 @@ class QuerySet(AltersData):
Insert each of the instances into the database. Do *not* call
save() on each of the instances, do not send any pre/post_save
signals, and do not set the primary key attribute if it is an
- autoincrement field (except if features.can_return_rows_from_bulk_insert=True).
- Multi-table models are not supported.
+ autoincrement field (except if
+ features.can_return_rows_from_bulk_insert=True). Multi-table models are
+ not supported.
"""
# When you bulk insert you don't get the primary keys back (if it's an
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
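
A hedged usage sketch matching the docstring above (the model name is an assumption): bulk_create() issues inserts without per-instance save() calls or signals, and autoincrement primary keys are only set back on the instances when the backend can return rows from a bulk insert.

    from myapp.models import Entry  # assumed model

    entries = Entry.objects.bulk_create(
        [Entry(headline="first"), Entry(headline="second")],
        batch_size=100,
    )
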
@@ -774,8 +775,9 @@ class QuerySet(AltersData):
raise ValueError("Batch size must be a positive integer.")
# Check that the parents share the same concrete model with the our
# model to detect the inheritance pattern ConcreteGrandParent ->
- # MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
- # would not identify that case as involving multiple tables.
+ # MultiTableParent -> ProxyChild. Simply checking
+ # self.model._meta.proxy would not identify that case as involving
+ # multiple tables.
for parent in self.model._meta.all_parents:
if parent._meta.concrete_model is not self.model._meta.concrete_model:
raise ValueError("Can't bulk create a multi-table inherited model")
@@ -1302,10 +1304,10 @@ class QuerySet(AltersData):
def _update(self, values):
"""
- A version of update() that accepts field objects instead of field names.
- Used primarily for model saving and not intended for use by general
- code (it requires too much poking around at model internals to be
- useful at that level).
+ A version of update() that accepts field objects instead of field
+ names. Used primarily for model saving and not intended for use by
+ general code (it requires too much poking around at model internals to
+ be useful at that level).
"""
if self.query.is_sliced:
raise TypeError("Cannot update a query once a slice has been taken.")
@@ -2365,9 +2367,9 @@ def prefetch_related_objects(model_instances, *related_lookups):
# Prepare objects:
good_objects = True
for obj in obj_list:
- # Since prefetching can re-use instances, it is possible to have
- # the same instance multiple times in obj_list, so obj might
- # already be prepared.
+ # Since prefetching can re-use instances, it is possible to
+ # have the same instance multiple times in obj_list, so obj
+ # might already be prepared.
if not hasattr(obj, "_prefetched_objects_cache"):
try:
obj._prefetched_objects_cache = {}
@@ -2376,7 +2378,8 @@ def prefetch_related_objects(model_instances, *related_lookups):
# values_list(flat=True), for example (TypeError) or
# a QuerySet subclass that isn't returning Model
# instances (AttributeError), either in Django or a 3rd
- # party. prefetch_related() doesn't make sense, so quit.
+ # party. prefetch_related() doesn't make sense, so
+ # quit.
good_objects = False
break
if not good_objects:
@@ -2384,8 +2387,9 @@ def prefetch_related_objects(model_instances, *related_lookups):
# Descend down tree
- # We assume that objects retrieved are homogeneous (which is the premise
- # of prefetch_related), so what applies to first object applies to all.
+ # We assume that objects retrieved are homogeneous (which is the
+ # premise of prefetch_related), so what applies to first object
+ # applies to all.
first_obj = obj_list[0]
to_attr = lookup.get_current_to_attr(level)[0]
prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(
@@ -2462,8 +2466,8 @@ def prefetch_related_objects(model_instances, *related_lookups):
if new_obj is None:
continue
# We special-case `list` rather than something more generic
- # like `Iterable` because we don't want to accidentally match
- # user models that define __iter__.
+ # like `Iterable` because we don't want to accidentally
+ # match user models that define __iter__.
if isinstance(new_obj, list):
new_obj_list.extend(new_obj)
else:
@@ -2528,8 +2532,8 @@ def get_prefetcher(instance, through_attr, to_attr):
if through_attr == to_attr:
is_fetched = rel_obj_descriptor.is_cached
else:
- # descriptor doesn't support prefetching, so we go ahead and get
- # the attribute on the instance rather than the class to
+ # descriptor doesn't support prefetching, so we go ahead and
+ # get the attribute on the instance rather than the class to
# support many related managers
rel_obj = getattr(instance, through_attr)
if hasattr(rel_obj, "get_prefetch_querysets"):
@@ -2556,12 +2560,14 @@ def prefetch_one_level(instances, prefetcher, lookup, level):
# prefetcher must have a method get_prefetch_querysets() which takes a list
# of instances, and returns a tuple:
- # (queryset of instances of self.model that are related to passed in instances,
+ # (queryset of instances of self.model that are related to passed in
+ # instances,
# callable that gets value to be matched for returned instances,
# callable that gets value to be matched for passed in instances,
# boolean that is True for singly related objects,
# cache or field name to assign to,
- # boolean that is True when the previous argument is a cache name vs a field name).
+ # boolean that is True when the previous argument is a cache name vs a
+ # field name).
# The 'values to be matched' must be hashable as they will be used
# in a dictionary.
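The tuple contract being rewrapped above is easier to read as code. A minimal
sketch of a conforming get_prefetch_querysets(), assuming a hypothetical
Author/Book pair of models (neither is part of this patch):

    def get_prefetch_querysets(self, instances, querysets=None):
        # Related Book rows for all passed-in Author instances.
        queryset = Book.objects.filter(author_id__in={a.pk for a in instances})
        return (
            queryset,                      # instances of self.model related to the passed-in instances
            lambda book: book.author_id,   # value to be matched for each returned instance
            lambda author: author.pk,      # value to be matched for each passed-in instance
            False,                         # True only for singly related objects
            "books",                       # cache or field name to assign to
            True,                          # True: the previous argument is a cache name, not a field name
        )

Both matched values (author_id and pk) are hashable, which satisfies the
dictionary requirement noted above.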
@@ -2601,8 +2607,9 @@ def prefetch_one_level(instances, prefetcher, lookup, level):
to_attr, as_attr = lookup.get_current_to_attr(level)
# Make sure `to_attr` does not conflict with a field.
if as_attr and instances:
- # We assume that objects retrieved are homogeneous (which is the premise
- # of prefetch_related), so what applies to first object applies to all.
+ # We assume that objects retrieved are homogeneous (which is the
+ # premise of prefetch_related), so what applies to first object applies
+ # to all.
model = instances[0].__class__
try:
model._meta.get_field(to_attr)
diff --git a/django/db/models/query_utils.py b/django/db/models/query_utils.py
index 3e644a3c26..5da3d81672 100644
--- a/django/db/models/query_utils.py
+++ b/django/db/models/query_utils.py
@@ -302,8 +302,9 @@ class RegisterLookupMixin:
@staticmethod
def merge_dicts(dicts):
"""
- Merge dicts in reverse to preference the order of the original list. e.g.,
- merge_dicts([a, b]) will preference the keys in 'a' over those in 'b'.
+ Merge dicts in reverse to preference the order of the original list.
+ e.g., merge_dicts([a, b]) will preference the keys in 'a' over those in
+ 'b'.
"""
merged = {}
for d in reversed(dicts):
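To make the preference order concrete, a standalone function with the same
body behaves like this (the example dicts are illustrative):

    def merge_dicts(dicts):
        # Apply later dicts first so that earlier dicts overwrite their keys.
        merged = {}
        for d in reversed(dicts):
            merged.update(d)
        return merged

    merge_dicts([{"exact": "A"}, {"exact": "B", "in": "C"}])
    # -> {'exact': 'A', 'in': 'C'}: keys from the first dict take preference.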
@@ -435,8 +436,8 @@ def check_rel_lookup_compatibility(model, target_opts, field):
# Restaurant.objects.filter(pk__in=Restaurant.objects.all()).
# If we didn't have the primary key check, then pk__in (== place__in) would
# give Place's opts as the target opts, but Restaurant isn't compatible
- # with that. This logic applies only to primary keys, as when doing __in=qs,
- # we are going to turn this into __in=qs.values('pk') later on.
+ # with that. This logic applies only to primary keys, as when doing
+ # __in=qs, we are going to turn this into __in=qs.values('pk') later on.
return check(target_opts) or (
getattr(field, "primary_key", False) and check(field.model._meta)
)
diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py
index b0b2ac5583..f72ba907ad 100644
--- a/django/db/models/sql/compiler.py
+++ b/django/db/models/sql/compiler.py
@@ -52,10 +52,11 @@ class SQLCompiler:
# they would return an empty result set.
self.elide_empty = elide_empty
self.quote_cache = {"*": "*"}
- # The select, klass_info, and annotations are needed by QuerySet.iterator()
- # these are set as a side-effect of executing the query. Note that we calculate
- # separately a list of extra select columns needed for grammatical correctness
- # of the query, but these columns are not included in self.select.
+ # The select, klass_info, and annotations are needed by
+ # QuerySet.iterator(); these are set as a side-effect of executing the
+ # query. Note that we calculate separately a list of extra select
+ # columns needed for grammatical correctness of the query, but these
+ # columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
@@ -946,9 +947,9 @@ class SQLCompiler:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
- # of order_by() and distinct(), forcing the ordering expressions
- # to be selected as well. Wrap the query in another subquery
- # to exclude extraneous selects.
+ # of order_by() and distinct(), forcing the ordering
+ # expressions to be selected as well. Wrap the query in another
+ # subquery to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
@@ -2107,8 +2108,8 @@ class SQLUpdateCompiler(SQLCompiler):
# If the result_type is NO_RESULTS then the aux_row_count is None.
aux_row_count = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_row_count:
- # Returns the row count for any related updates as the number of
- # rows updated.
+ # Returns the row count for any related updates as the number
+ # of rows updated.
row_count = aux_row_count
is_empty = False
return row_count
diff --git a/django/db/models/sql/datastructures.py b/django/db/models/sql/datastructures.py
index be6934485c..ffdd36c0c8 100644
--- a/django/db/models/sql/datastructures.py
+++ b/django/db/models/sql/datastructures.py
@@ -37,8 +37,8 @@ class Join:
- table_alias (possible alias for the table, can be None)
- join_type (can be None for those entries that aren't joined from
anything)
- - parent_alias (which table is this join's parent, can be None similarly
- to join_type)
+ - parent_alias (which table is this join's parent, can be None
+ similarly to join_type)
- as_sql()
- relabeled_clone()
"""
@@ -76,7 +76,8 @@ class Join:
def as_sql(self, compiler, connection):
"""
Generate the full
- LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params
+ LEFT OUTER JOIN sometable ON sometable.somecol =
+ othertable.othercol, params
clause for this join.
"""
join_conditions = []
diff --git a/django/db/models/sql/query.py b/django/db/models/sql/query.py
index 20dbf7cfaa..5e87f65e7c 100644
--- a/django/db/models/sql/query.py
+++ b/django/db/models/sql/query.py
@@ -308,10 +308,9 @@ class Query(BaseExpression):
self.alias_map = {}
# Whether to provide alias to columns during reference resolving.
self.alias_cols = alias_cols
- # Sometimes the query contains references to aliases in outer queries (as
- # a result of split_exclude). Correct alias quoting needs to know these
- # aliases too.
- # Map external tables to whether they are aliased.
+ # Sometimes the query contains references to aliases in outer queries
+ # (as a result of split_exclude). Correct alias quoting needs to know
+ # these aliases too. Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.used_aliases = set()
@@ -593,8 +592,8 @@ class Query(BaseExpression):
and not inner_query.annotation_select_mask
):
# In case of Model.objects[0:3].count(), there would be no
- # field selected in the inner query, yet we must use a subquery.
- # So, make sure at least one field is selected.
+ # field selected in the inner query, yet we must use a
+ # subquery. So, make sure at least one field is selected.
inner_query.select = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
@@ -932,10 +931,11 @@ class Query(BaseExpression):
an outer join. If 'unconditional' is False, only promote the join if
it is nullable or the parent join is an outer join.
- The children promotion is done to avoid join chains that contain a LOUTER
- b INNER c. So, if we have currently a INNER b INNER c and a->b is promoted,
- then we must also promote b->c automatically, or otherwise the promotion
- of a->b doesn't actually change anything in the query results.
+ The children promotion is done to avoid join chains that contain a
+ LOUTER b INNER c. So, if we have currently a INNER b INNER c and a->b
+ is promoted, then we must also promote b->c automatically, or otherwise
+ the promotion of a->b doesn't actually change anything in the query
+ results.
"""
aliases = list(aliases)
while aliases:
@@ -1228,7 +1228,8 @@ class Query(BaseExpression):
if FORBIDDEN_ALIAS_PATTERN.search(alias):
raise ValueError(
"Column aliases cannot contain whitespace characters, quotation marks, "
- # RemovedInDjango70Warning: When the deprecation ends, replace with:
+ # RemovedInDjango70Warning: When the deprecation ends, replace
+ # with:
# "semicolons, percent signs, or SQL comments."
"semicolons, or SQL comments."
)
@@ -1256,7 +1257,8 @@ class Query(BaseExpression):
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
- # Subqueries need to use a different set of aliases than the outer query.
+ # Subqueries need to use a different set of aliases than the outer
+ # query.
clone.bump_prefix(query)
clone.subquery = True
clone.where.resolve_expression(query, *args, **kwargs)
@@ -1893,9 +1895,9 @@ class Query(BaseExpression):
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
- that takes a field and alias and is equivalent to `field.get_col(alias)`
- in the simple case but wraps field transforms if they were included in
- names.
+ that takes a field and alias and is equivalent to
+ `field.get_col(alias)` in the simple case but wraps field transforms if
+ they were included in names.
The target field is the field containing the concrete value. Final
field can be something different, for example foreign key pointing to
@@ -2052,7 +2054,8 @@ class Query(BaseExpression):
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
- # that case we need to return a Ref to the subquery's annotation.
+ # that case we need to return a Ref to the subquery's
+ # annotation.
if name not in self.annotation_select:
raise FieldError(
"Cannot aggregate over the '%s' alias. Use annotate() "
@@ -2127,8 +2130,8 @@ class Query(BaseExpression):
alias = col.alias
if alias in can_reuse:
pk = select_field.model._meta.pk
- # Need to add a restriction so that outer query's filters are in effect for
- # the subquery, too.
+ # Need to add a restriction so that outer query's filters are in
+ # effect for the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup("exact")
# Note that the query.select[0].alias is different from alias
@@ -2204,7 +2207,8 @@ class Query(BaseExpression):
"""
Return True if adding filters to this instance is still possible.
- Typically, this means no limits or offsets have been put on the results.
+ Typically, this means no limits or offsets have been put on the
+ results.
"""
return not self.is_sliced
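This is the guard behind the familiar slicing error; an illustrative session
(Article is a placeholder model, not something introduced by this patch):

    qs = Article.objects.all()[:10]       # is_sliced is now True
    qs.filter(headline__startswith="A")
    # TypeError: Cannot filter a query once a slice has been taken.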
@@ -2440,8 +2444,8 @@ class Query(BaseExpression):
"""
# Fields on related models are stored in the literal double-underscore
# format, so that we can use a set datastructure. We do the foo__bar
- # splitting and handling when computing the SQL column names (as part of
- # get_columns()).
+ # splitting and handling when computing the SQL column names (as part
+ # of get_columns()).
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
@@ -2630,8 +2634,8 @@ class Query(BaseExpression):
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
- boolean indicating if the joins in the prefix contain a LEFT OUTER join.
- _"""
+ boolean indicating if the joins in the prefix contain a LEFT OUTER
+ join. _"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
@@ -2678,9 +2682,10 @@ class Query(BaseExpression):
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
- # TODO: It might be possible to trim more joins from the start of the
- # inner query if it happens to have a longer join chain containing the
- # values in select_fields. Lets punt this one for now.
+ # TODO: It might be possible to trim more joins from the start of
+ # the inner query if it happens to have a longer join chain
+ # containing the values in select_fields. Let's punt this one for
+ # now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a join_class instead of a
diff --git a/django/db/models/sql/subqueries.py b/django/db/models/sql/subqueries.py
index 9cb971b38f..2705114a54 100644
--- a/django/db/models/sql/subqueries.py
+++ b/django/db/models/sql/subqueries.py
@@ -1,5 +1,6 @@
"""
-Query subclasses which provide extra functionality beyond simple data retrieval.
+Query subclasses which provide extra functionality beyond simple data
+retrieval.
"""
from django.core.exceptions import FieldError
@@ -116,7 +117,8 @@ class UpdateQuery(Query):
if field.generated:
continue
if hasattr(val, "resolve_expression"):
- # Resolve expressions here so that annotations are no longer needed
+ # Resolve expressions here so that annotations are no longer
+ # needed
val = val.resolve_expression(self, allow_joins=False, for_save=True)
self.values.append((field, model, val))
diff --git a/django/db/transaction.py b/django/db/transaction.py
index 0c2eee8e73..1710d1ef17 100644
--- a/django/db/transaction.py
+++ b/django/db/transaction.py
@@ -252,9 +252,9 @@ class Atomic(ContextDecorator):
# minimize overhead for the database server.
connection.savepoint_commit(sid)
except Error:
- # If rolling back to a savepoint fails, mark for
- # rollback at a higher level and avoid shadowing
- # the original exception.
+ # If rolling back to a savepoint fails, mark
+ # for rollback at a higher level and avoid
+ # shadowing the original exception.
connection.needs_rollback = True
raise
else:
@@ -270,8 +270,8 @@ class Atomic(ContextDecorator):
connection.close()
raise
else:
- # This flag will be set to True again if there isn't a savepoint
- # allowing to perform the rollback at this level.
+ # This flag will be set to True again if there isn't a
+ # savepoint allowing to perform the rollback at this level.
connection.needs_rollback = False
if connection.in_atomic_block:
# Roll back to savepoint if there is one, mark for rollback
diff --git a/django/db/utils.py b/django/db/utils.py
index faaf3bf862..4541edbe5a 100644
--- a/django/db/utils.py
+++ b/django/db/utils.py
@@ -224,7 +224,8 @@ class ConnectionRouter:
try:
method = getattr(router, action)
except AttributeError:
- # If the router doesn't have a method, skip to the next one.
+ # If the router doesn't have a method, skip to the next
+ # one.
pass
else:
chosen_db = method(model, **hints)
diff --git a/django/dispatch/__init__.py b/django/dispatch/__init__.py
index a615f9905a..bd219b4809 100644
--- a/django/dispatch/__init__.py
+++ b/django/dispatch/__init__.py
@@ -1,7 +1,8 @@
"""Multi-consumer multi-producer dispatching mechanism
-Originally based on pydispatch (BSD) https://pypi.org/project/PyDispatcher/2.0.1/
-See license.txt for original license.
+Originally based on pydispatch (BSD)
+https://pypi.org/project/PyDispatcher/2.0.1/ See license.txt for original
+license.
Heavily modified for Django's purposes.
"""
diff --git a/django/dispatch/dispatcher.py b/django/dispatch/dispatcher.py
index 4b962ce524..eed321c690 100644
--- a/django/dispatch/dispatcher.py
+++ b/django/dispatch/dispatcher.py
@@ -29,7 +29,8 @@ class Signal:
Internal attributes:
receivers:
- [((id(receiver), id(sender)), ref(receiver), ref(sender), is_async)]
+ [((id(receiver), id(sender)), ref(receiver), ref(sender),
+ is_async)]
sender_receivers_cache:
WeakKeyDictionary[sender, list[receiver]]
"""
@@ -75,12 +76,12 @@ class Signal:
weak
Whether to use weak references to the receiver. By default, the
module will attempt to use weak references to the receiver
- objects. If this parameter is false, then strong references will
- be used.
+ objects. If this parameter is false, then strong references
+ will be used.
dispatch_uid
- An identifier used to uniquely identify a particular instance of
- a receiver. This will usually be a string, though it may be
+ An identifier used to uniquely identify a particular instance
+ of a receiver. This will usually be a string, though it may be
anything hashable.
"""
from django.conf import settings
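As a usage reminder for the two arguments documented above (the receiver name
is arbitrary):

    from django.core.signals import request_finished

    def log_finished(sender, **kwargs):
        print("request finished")

    # dispatch_uid deduplicates repeated connect() calls; weak=False keeps a
    # strong reference so the receiver is not garbage collected.
    request_finished.connect(log_finished, dispatch_uid="log-finished", weak=False)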
@@ -133,8 +134,8 @@ class Signal:
"""
Disconnect receiver from sender for signal.
- If weak references are used, disconnect need not be called. The receiver
- will be removed from dispatch automatically.
+ If weak references are used, disconnect need not be called. The
+ receiver will be removed from dispatch automatically.
Arguments:
@@ -173,9 +174,9 @@ class Signal:
"""
Send signal from sender to all connected receivers.
- If any receiver raises an error, the error propagates back through send,
- terminating the dispatch loop. So it's possible that all receivers
- won't be called if an error is raised.
+ If any receiver raises an error, the error propagates back through
+ send, terminating the dispatch loop. So it's possible that all
+ receivers won't be called if an error is raised.
If any receivers are asynchronous, they are called after all the
synchronous receivers via a single call to async_to_sync(). They are
@@ -292,8 +293,8 @@ class Signal:
Arguments:
sender
- The sender of the signal. Can be any Python object (normally one
- registered with a connect if you actually want something to
+ The sender of the signal. Can be any Python object (normally
+ one registered with a connect if you actually want something to
occur).
named
@@ -357,8 +358,8 @@ class Signal:
Arguments:
sender
- The sender of the signal. Can be any Python object (normally one
- registered with a connect if you actually want something to
+ The sender of the signal. Can be any Python object (normally
+ one registered with a connect if you actually want something to
occur).
named
@@ -439,8 +440,9 @@ class Signal:
receivers = None
if self.use_caching and not self._dead_receivers:
receivers = self.sender_receivers_cache.get(sender)
- # We could end up here with NO_RECEIVERS even if we do check this case in
- # .send() prior to calling _live_receivers() due to concurrent .send() call.
+ # We could end up here with NO_RECEIVERS even if we do check this
+ # case in .send() prior to calling _live_receivers() due to
+ # concurrent .send() call.
if receivers is NO_RECEIVERS:
return [], []
if receivers is None:
diff --git a/django/forms/boundfield.py b/django/forms/boundfield.py
index c0324d5c1d..f6b721c72a 100644
--- a/django/forms/boundfield.py
+++ b/django/forms/boundfield.py
@@ -114,7 +114,8 @@ class BoundField(RenderableFieldMixin):
def as_text(self, attrs=None, **kwargs):
"""
- Return a string of HTML for representing this as an <input type="text">.
+ Return a string of HTML for representing this as an
+ <input type="text">.
"""
return self.as_widget(TextInput(), attrs, **kwargs)
@@ -124,7 +125,8 @@ class BoundField(RenderableFieldMixin):
def as_hidden(self, attrs=None, **kwargs):
"""
- Return a string of HTML for representing this as an <input type="hidden">.
+ Return a string of HTML for representing this as an
+ <input type="hidden">.
"""
return self.as_widget(self.field.hidden_widget(), attrs, **kwargs)
@@ -181,7 +183,8 @@ class BoundField(RenderableFieldMixin):
)
# Only add the suffix if the label does not end in punctuation.
# Translators: If found as last label character, these punctuation
- # characters will prevent the default label_suffix to be appended to the label
+ # characters will prevent the default label_suffix to be appended to
+ # the label
if label_suffix and contents and contents[-1] not in _(":?.!"):
contents = format_html("{}{}", contents, label_suffix)
widget = self.field.widget
@@ -239,7 +242,8 @@ class BoundField(RenderableFieldMixin):
def auto_id(self):
"""
Calculate and return the ID attribute for this BoundField, if the
- associated Form has specified auto_id. Return an empty string otherwise.
+ associated Form has specified auto_id. Return an empty string
+ otherwise.
"""
auto_id = self.form.auto_id # Boolean or string
if auto_id and "%s" in str(auto_id):
diff --git a/django/forms/fields.py b/django/forms/fields.py
index 04aa2039fd..182d63c9b4 100644
--- a/django/forms/fields.py
+++ b/django/forms/fields.py
@@ -126,15 +126,18 @@ class Field:
# help_text -- An optional string to use as "help text" for this Field.
# error_messages -- An optional dictionary to override the default
# messages that the field will raise.
- # show_hidden_initial -- Boolean that specifies if it is needed to render a
+ # show_hidden_initial -- Boolean that specifies if it is needed to
+ # render a
# hidden widget with initial value after widget.
# validators -- List of additional validators to use
# localize -- Boolean that specifies if the field should be localized.
- # disabled -- Boolean that specifies whether the field is disabled, that
+ # disabled -- Boolean that specifies whether the field is disabled,
+ # that
# is its widget is shown in the form but not editable.
# label_suffix -- Suffix to be added to the label. Overrides
# form's label_suffix.
- # bound_field_class -- BoundField class to use in Field.get_bound_field.
+ # bound_field_class -- BoundField class to use in
+ # Field.get_bound_field.
self.required, self.label, self.initial = required, label, initial
self.show_hidden_initial = show_hidden_initial
self.help_text = help_text
@@ -727,8 +730,8 @@ class ImageField(FileField):
from PIL import Image
- # We need to get a file object for Pillow. We might have a path or we might
- # have to read the data into memory.
+ # We need to get a file object for Pillow. We might have a path or we
+ # might have to read the data into memory.
if hasattr(data, "temporary_file_path"):
file = data.temporary_file_path()
else:
@@ -929,7 +932,8 @@ class TypedChoiceField(ChoiceField):
def _coerce(self, value):
"""
- Validate that the value can be coerced to the right type (if not empty).
+ Validate that the value can be coerced to the right type (if not
+ empty).
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
diff --git a/django/forms/forms.py b/django/forms/forms.py
index d05bf4bb9e..760ba7b767 100644
--- a/django/forms/forms.py
+++ b/django/forms/forms.py
@@ -137,12 +137,12 @@ class BaseForm(RenderableFormMixin):
"""
Rearrange the fields according to field_order.
- field_order is a list of field names specifying the order. Append fields
- not included in the list in the default order for backward compatibility
- with subclasses not overriding field_order. If field_order is None,
- keep all fields in the order defined in the class. Ignore unknown
- fields in field_order to allow disabling fields in form subclasses
- without redefining ordering.
+ field_order is a list of field names specifying the order. Append
+ fields not included in the list in the default order for backward
+ compatibility with subclasses not overriding field_order. If
+ field_order is None, keep all fields in the order defined in the class.
+ Ignore unknown fields in field_order to allow disabling fields in form
+ subclasses without redefining ordering.
"""
if field_order is None:
return
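For context, field_order is normally declared on the form class; a minimal
sketch (ContactForm and its fields are illustrative):

    from django import forms

    class ContactForm(forms.Form):
        field_order = ["email", "subject"]   # "message" is appended afterwards
        subject = forms.CharField()
        message = forms.CharField(widget=forms.Textarea)
        email = forms.EmailField()

    list(ContactForm().fields)   # ['email', 'subject', 'message']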
@@ -367,10 +367,10 @@ class BaseForm(RenderableFormMixin):
def clean(self):
"""
- Hook for doing any extra form-wide cleaning after Field.clean() has been
- called on every field. Any ValidationError raised by this method will
- not be associated with a particular field; it will have a special-case
- association with the field named '__all__'.
+ Hook for doing any extra form-wide cleaning after Field.clean() has
+ been called on every field. Any ValidationError raised by this method
+ will not be associated with a particular field; it will have a
+ special-case association with the field named '__all__'.
"""
return self.cleaned_data
diff --git a/django/forms/formsets.py b/django/forms/formsets.py
index 94aebe4924..054cc0bc0a 100644
--- a/django/forms/formsets.py
+++ b/django/forms/formsets.py
@@ -307,10 +307,10 @@ class BaseFormSet(RenderableFormMixin):
raise AttributeError(
"'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__
)
- # Construct _ordering, which is a list of (form_index, order_field_value)
- # tuples. After constructing this list, we'll sort it by order_field_value
- # so we have a way to get to the form indexes in the order specified
- # by the form data.
+ # Construct _ordering, which is a list of (form_index,
+ # order_field_value) tuples. After constructing this list, we'll sort
+ # it by order_field_value so we have a way to get to the form indexes
+ # in the order specified by the form data.
if not hasattr(self, "_ordering"):
self._ordering = []
for i, form in enumerate(self.forms):
diff --git a/django/forms/models.py b/django/forms/models.py
index 574399ccb1..7fe803624e 100644
--- a/django/forms/models.py
+++ b/django/forms/models.py
@@ -70,7 +70,8 @@ def construct_instance(form, instance, fields=None, exclude=None):
if exclude and f.name in exclude:
continue
# Leave defaults for fields that aren't in POST data, except for
- # checkbox inputs because they don't appear in POST data if not checked.
+ # checkbox inputs because they don't appear in POST data if not
+ # checked.
if (
f.has_default()
and form[f.name].field.widget.value_omitted_from_data(
@@ -167,7 +168,8 @@ def fields_for_model(
``formfield_callback`` is a callable that takes a model field and returns
a form field.
- ``localized_fields`` is a list of names of fields which should be localized.
+ ``localized_fields`` is a list of names of fields which should be
+ localized.
``labels`` is a dictionary of model field names mapped to a label.
@@ -422,9 +424,9 @@ class BaseModelForm(BaseForm, AltersData):
# Exclude empty fields that are not required by the form, if the
# underlying model field is required. This keeps the model field
# from raising a required error. Note: don't exclude the field from
- # validation if the model field allows blanks. If it does, the blank
- # value may be included in a unique check, so cannot be excluded
- # from validation.
+ # validation if the model field allows blanks. If it does, the
+ # blank value may be included in a unique check, so cannot be
+ # excluded from validation.
else:
form_field = self.fields[field]
field_value = self.cleaned_data.get(field)
@@ -612,7 +614,8 @@ def modelform_factory(
``widgets`` is a dictionary of model field names mapped to a widget.
- ``localized_fields`` is a list of names of fields which should be localized.
+ ``localized_fields`` is a list of names of fields which should be
+ localized.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
@@ -860,7 +863,8 @@ class BaseModelFormSet(BaseFormSet, AltersData):
for d in row_data
)
if row_data and None not in row_data:
- # if we've already seen it then we have a uniqueness failure
+ # if we've already seen it then we have a uniqueness
+ # failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
@@ -887,7 +891,8 @@ class BaseModelFormSet(BaseFormSet, AltersData):
and form.cleaned_data[field] is not None
and form.cleaned_data[unique_for] is not None
):
- # if it's a date lookup we need to get the data for all the fields
+ # if it's a date lookup we need to get the data for all the
+ # fields
if lookup == "date":
date = form.cleaned_data[unique_for]
date_data = (date.year, date.month, date.day)
@@ -896,7 +901,8 @@ class BaseModelFormSet(BaseFormSet, AltersData):
else:
date_data = (getattr(form.cleaned_data[unique_for], lookup),)
data = (form.cleaned_data[field], *date_data)
- # if we've already seen it then we have a uniqueness failure
+ # if we've already seen it then we have a uniqueness
+ # failure
if data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
@@ -1181,7 +1187,8 @@ class BaseInlineFormSet(BaseModelFormSet):
kwargs = {"pk_field": True}
else:
# The foreign key field might not be on the form, so we poke at the
- # Model field to get the label, since we need that for error messages.
+ # Model field to get the label, since we need that for error
+ # messages.
name = self.fk.name
kwargs = {
"label": getattr(
@@ -1553,12 +1560,12 @@ class ModelChoiceField(ChoiceField):
return self._choices
# Otherwise, execute the QuerySet in self.queryset to determine the
- # choices dynamically. Return a fresh ModelChoiceIterator that has not been
- # consumed. Note that we're instantiating a new ModelChoiceIterator *each*
- # time _get_choices() is called (and, thus, each time self.choices is
- # accessed) so that we can ensure the QuerySet has not been consumed. This
- # construct might look complicated but it allows for lazy evaluation of
- # the queryset.
+ # choices dynamically. Return a fresh ModelChoiceIterator that has not
+ # been consumed. Note that we're instantiating a new
+ # ModelChoiceIterator *each* time _get_choices() is called (and, thus,
+ # each time self.choices is accessed) so that we can ensure the
+ # QuerySet has not been consumed. This construct might look complicated
+ # but it allows for lazy evaluation of the queryset.
return self.iterator(self)
choices = property(_get_choices, ChoiceField.choices.fset)
diff --git a/django/forms/widgets.py b/django/forms/widgets.py
index 9b5ad1b2b9..5a25b66e9a 100644
--- a/django/forms/widgets.py
+++ b/django/forms/widgets.py
@@ -71,7 +71,8 @@ class MediaAsset:
self.attributes = attributes
def __eq__(self, other):
- # Compare the path only, to ensure performant comparison in Media.merge.
+ # Compare the path only, to ensure performant comparison in
+ # Media.merge.
return (self.__class__ is other.__class__ and self.path == other.path) or (
isinstance(other, str) and self._path == other
)
@@ -161,8 +162,8 @@ class Media:
]
def render_css(self):
- # To keep rendering order consistent, we can't just iterate over items().
- # We need to sort the keys, and iterate over the sorted list.
+ # To keep rendering order consistent, we can't just iterate over
+ # items(). We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css)
return chain.from_iterable(
[
@@ -585,7 +586,8 @@ class ClearableFileInput(FileInput):
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
- # False signals to clear any existing value, as opposed to just None
+ # False signals to clear any existing value, as opposed to just
+ # None
return False
return upload
diff --git a/django/http/multipartparser.py b/django/http/multipartparser.py
index 4ee8401eb6..531f9a0468 100644
--- a/django/http/multipartparser.py
+++ b/django/http/multipartparser.py
@@ -168,7 +168,8 @@ class MultiPartParser:
# Instantiate the parser and stream:
stream = LazyStream(ChunkIter(self._input_data, self._chunk_size))
- # Whether or not to signal a file-completion at the beginning of the loop.
+ # Whether or not to signal a file-completion at the beginning of the
+ # loop.
old_field_name = None
counters = [0] * len(handlers)
@@ -418,8 +419,8 @@ class LazyStream:
The LazyStream wrapper allows one to get and "unget" bytes from a stream.
Given a producer object (an iterator that yields bytestrings), the
- LazyStream object will support iteration, reading, and keeping a "look-back"
- variable in case you need to "unget" some bytes.
+ LazyStream object will support iteration, reading, and keeping a
+ "look-back" variable in case you need to "unget" some bytes.
"""
def __init__(self, producer, length=None):
diff --git a/django/http/request.py b/django/http/request.py
index ff5974770f..c8adde768d 100644
--- a/django/http/request.py
+++ b/django/http/request.py
@@ -90,7 +90,9 @@ class HttpRequest:
@cached_property
def accepted_types(self):
- """Return a list of MediaType instances, in order of preference (quality)."""
+ """
+ Return a list of MediaType instances, in order of preference (quality).
+ """
header_value = self.headers.get("Accept", "*/*")
return sorted(
(
@@ -105,7 +107,8 @@ class HttpRequest:
@cached_property
def accepted_types_by_precedence(self):
"""
- Return a list of MediaType instances, in order of precedence (specificity).
+ Return a list of MediaType instances, in order of precedence
+ (specificity).
"""
return sorted(
self.accepted_types,
@@ -347,7 +350,8 @@ class HttpRequest:
@property
def upload_handlers(self):
if not self._upload_handlers:
- # If there are no upload handlers defined, initialize them from settings.
+ # If there are no upload handlers defined, initialize them from
+ # settings.
self._initialize_handlers()
return self._upload_handlers
@@ -380,7 +384,8 @@ class HttpRequest:
"You cannot access body after reading from request's data stream"
)
- # Limit the maximum request data size that will be handled in-memory.
+ # Limit the maximum request data size that will be handled
+ # in-memory.
if (
settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
and int(self.META.get("CONTENT_LENGTH") or 0)
@@ -404,7 +409,9 @@ class HttpRequest:
self._files = MultiValueDict()
def _load_post_and_files(self):
- """Populate self._post and self._files if the content-type is a form type"""
+ """
+ Populate self._post and self._files if the content-type is a form type
+ """
if self.method != "POST":
self._post, self._files = (
QueryDict(encoding=self._encoding),
@@ -543,8 +550,8 @@ class QueryDict(MultiValueDict):
By default QueryDicts are immutable, though the copy() method
will always return a mutable copy.
- Both keys and values set on this class are converted from the given encoding
- (DEFAULT_CHARSET by default) to str.
+ Both keys and values set on this class are converted from the given
+ encoding (DEFAULT_CHARSET by default) to str.
"""
# These are both reset in __init__, but is specified here at the class
@@ -562,7 +569,8 @@ class QueryDict(MultiValueDict):
"max_num_fields": settings.DATA_UPLOAD_MAX_NUMBER_FIELDS,
}
if isinstance(query_string, bytes):
- # query_string normally contains URL-encoded data, a subset of ASCII.
+ # query_string normally contains URL-encoded data, a subset of
+ # ASCII.
try:
query_string = query_string.decode(self.encoding)
except UnicodeDecodeError:
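A quick illustration of the bytes-handling path, run inside a configured
Django environment (the query string is made up; decoding uses
DEFAULT_CHARSET, i.e. UTF-8, by default):

    from django.http import QueryDict

    q = QueryDict(b"name=caf%C3%A9&tag=a&tag=b")
    q["name"]          # 'café' -- percent-decoded, then decoded to str
    q.getlist("tag")   # ['a', 'b']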
@@ -747,7 +755,8 @@ class MediaType:
return False
if bool(self.range_params) == bool(other.range_params):
- # If both have params or neither have params, they must be identical.
+ # If both have params or neither have params, they must be
+ # identical.
result = self.range_params == other.range_params
else:
# If self has params and other does not, it's a match.
diff --git a/django/http/response.py b/django/http/response.py
index 6d09bc87e2..40b2d7089d 100644
--- a/django/http/response.py
+++ b/django/http/response.py
@@ -323,7 +323,8 @@ class HttpResponseBase:
# See https://docs.python.org/library/io.html#io.IOBase
# The WSGI server must call this method upon completion of the request.
- # See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
+ # See
+ # http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
def close(self):
for closer in self._resource_closers:
try:
diff --git a/django/middleware/cache.py b/django/middleware/cache.py
index df26def6b4..10fff365af 100644
--- a/django/middleware/cache.py
+++ b/django/middleware/cache.py
@@ -29,8 +29,8 @@ More details about how the caching works:
of the response's "Cache-Control" header, falling back to the
CACHE_MIDDLEWARE_SECONDS setting if the section was not found.
-* This middleware expects that a HEAD request is answered with the same response
- headers exactly like the corresponding GET request.
+* This middleware expects that a HEAD request is answered with the same
+ response headers exactly like the corresponding GET request.
* When a hit occurs, a shallow copy of the original response object is returned
from process_request.
@@ -163,7 +163,8 @@ class FetchFromCacheMiddleware(MiddlewareMixin):
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
response = self.cache.get(cache_key)
- # if it wasn't found and we are looking for a HEAD, try looking just for that
+ # if it wasn't found and we are looking for a HEAD, try looking just
+ # for that
if response is None and request.method == "HEAD":
cache_key = get_cache_key(
request, self.key_prefix, "HEAD", cache=self.cache
diff --git a/django/middleware/csrf.py b/django/middleware/csrf.py
index 2e4b30ed54..c2800cfad4 100644
--- a/django/middleware/csrf.py
+++ b/django/middleware/csrf.py
@@ -420,7 +420,8 @@ class CsrfViewMiddleware(MiddlewareMixin):
if getattr(callback, "csrf_exempt", False):
return None
- # Assume that anything not defined as 'safe' by RFC 9110 needs protection
+ # Assume that anything not defined as 'safe' by RFC 9110 needs
+ # protection
if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
return self._accept(request)
diff --git a/django/middleware/http.py b/django/middleware/http.py
index 84c5466bb6..72ef52a126 100644
--- a/django/middleware/http.py
+++ b/django/middleware/http.py
@@ -6,8 +6,9 @@ from django.utils.http import parse_http_date_safe
class ConditionalGetMiddleware(MiddlewareMixin):
"""
Handle conditional GET operations. If the response has an ETag or
- Last-Modified header and the request has If-None-Match or If-Modified-Since,
- replace the response with HttpNotModified. Add an ETag header if needed.
+ Last-Modified header and the request has If-None-Match or
+ If-Modified-Since, replace the response with HttpNotModified. Add an ETag
+ header if needed.
"""
def process_response(self, request, response):
diff --git a/django/template/base.py b/django/template/base.py
index 121a47d638..3e8a59fbe7 100644
--- a/django/template/base.py
+++ b/django/template/base.py
@@ -482,7 +482,8 @@ class Parser:
nodelist = NodeList()
while self.tokens:
token = self.next_token()
- # Use the raw values here for TokenType.* for a tiny performance boost.
+ # Use the raw values here for TokenType.* for a tiny performance
+ # boost.
token_type = token.token_type.value
if token_type == 0: # TokenType.TEXT
self.extend_nodelist(nodelist, TextNode(token.contents), token)
@@ -845,8 +846,8 @@ class Variable:
try:
self.literal = mark_safe(unescape_string_literal(var))
except ValueError:
- # Otherwise we'll set self.lookups so that resolve() knows we're
- # dealing with a bonafide variable
+ # Otherwise we'll set self.lookups so that resolve() knows
+ # we're dealing with a bonafide variable
if VARIABLE_ATTRIBUTE_SEPARATOR + "_" in var or var[0] == "_":
raise TemplateSyntaxError(
"Variables and attributes may "
@@ -907,7 +908,8 @@ class Variable:
# numpy < 1.9 and 1.9+ respectively
except (TypeError, AttributeError, KeyError, ValueError, IndexError):
try: # attribute lookup
- # Don't return class attributes if the class is the context:
+ # Don't return class attributes if the class is the
+ # context:
if isinstance(current, BaseContext) and getattr(
type(current), bit
):
diff --git a/django/template/context.py b/django/template/context.py
index bacce88173..60f16f9be0 100644
--- a/django/template/context.py
+++ b/django/template/context.py
@@ -81,7 +81,10 @@ class BaseContext:
context[key] = value
def __getitem__(self, key):
- "Get a variable's value, starting at the current context and going upward"
+ """
+ Get a variable's value, starting at the current context and going
+ upward
+ """
for d in reversed(self.dicts):
if key in d:
return d[key]
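The upward search is easiest to see with an explicit push (plain
django.template.Context usage, unrelated to this patch):

    from django.template import Context

    c = Context({"user": "outer"})
    with c.push(user="inner"):
        c["user"]   # 'inner' -- found in the most recently pushed dict
    c["user"]       # 'outer' again once the pushed dict is removed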
@@ -177,16 +180,17 @@ class RenderContext(BaseContext):
"""
A stack container for storing Template state.
- RenderContext simplifies the implementation of template Nodes by providing a
- safe place to store state between invocations of a node's `render` method.
+ RenderContext simplifies the implementation of template Nodes by providing
+ a safe place to store state between invocations of a node's `render`
+ method.
The RenderContext also provides scoping rules that are more sensible for
'template local' variables. The render context stack is pushed before each
template is rendered, creating a fresh scope with nothing in it. Name
- resolution fails if a variable is not found at the top of the RequestContext
- stack. Thus, variables are local to a specific template and don't affect the
- rendering of other templates as they would if they were stored in the normal
- template context.
+ resolution fails if a variable is not found at the top of the
+ RequestContext stack. Thus, variables are local to a specific template and
+ don't affect the rendering of other templates as they would if they were
+ stored in the normal template context.
"""
template = None
diff --git a/django/template/context_processors.py b/django/template/context_processors.py
index f9e5f218e4..214972de53 100644
--- a/django/template/context_processors.py
+++ b/django/template/context_processors.py
@@ -17,8 +17,8 @@ from django.utils.functional import SimpleLazyObject, lazy
def csrf(request):
"""
- Context processor that provides a CSRF token, or the string 'NOTPROVIDED' if
- it has not been provided by either a view decorator or the middleware
+ Context processor that provides a CSRF token, or the string 'NOTPROVIDED'
+ if it has not been provided by either a view decorator or the middleware
"""
def _get_val():
diff --git a/django/template/defaultfilters.py b/django/template/defaultfilters.py
index 66c6e76d20..b50b790fc1 100644
--- a/django/template/defaultfilters.py
+++ b/django/template/defaultfilters.py
@@ -169,9 +169,10 @@ def floatformat(text, arg=-1):
# Exponent values can be "F", "n", "N".
number_of_digits_and_exponent_sum = 0
- # Values with more than 200 digits, or with a large exponent, are returned "as is"
- # to avoid high memory consumption and potential denial-of-service attacks.
- # The cut-off of 200 is consistent with django.utils.numberformat.floatformat().
+ # Values with more than 200 digits, or with a large exponent, are returned
+ # "as is" to avoid high memory consumption and potential denial-of-service
+ # attacks. The cut-off of 200 is consistent with
+ # django.utils.numberformat.floatformat().
if number_of_digits_and_exponent_sum > 200:
return input_val
@@ -281,7 +282,8 @@ def stringformat(value, arg):
This specifier uses Python string formatting syntax, with the exception
that the leading "%" is dropped.
- See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting
+ See
+ https://docs.python.org/library/stdtypes.html#printf-style-string-formatting
for documentation of Python string formatting.
"""
if isinstance(value, tuple):
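A reminder of the dropped-"%" convention (the filter is called directly here
for brevity):

    from django.template.defaultfilters import stringformat

    stringformat(7, "03d")        # '007'
    stringformat(3.14159, ".2f")  # '3.14'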
diff --git a/django/template/defaulttags.py b/django/template/defaulttags.py
index 98fa43dd69..a20598152c 100644
--- a/django/template/defaulttags.py
+++ b/django/template/defaulttags.py
@@ -205,8 +205,8 @@ class ForNode(Node):
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
- # Create a forloop value in the context. We'll update counters on each
- # iteration just below.
+ # Create a forloop value in the context. We'll update counters on
+ # each iteration just below.
loop_dict = context["forloop"] = {
"parentloop": parentloop,
"length": len_values,
@@ -224,8 +224,8 @@ class ForNode(Node):
pop_context = False
if unpack:
- # If there are multiple loop variables, unpack the item into
- # them.
+ # If there are multiple loop variables, unpack the item
+ # into them.
try:
len_item = len(item)
except TypeError: # not an iterable
@@ -293,8 +293,8 @@ class IfChangedNode(Node):
# changes.
if "forloop" in context:
# Ifchanged is bound to the local for loop.
- # When there is a loop-in-loop, the state is bound to the inner loop,
- # so it resets when the outer loop continues.
+ # When there is a loop-in-loop, the state is bound to the inner
+ # loop, so it resets when the outer loop continues.
return context["forloop"]
else:
# Using ifchanged outside loops. Effectively this is a no-op
@@ -1274,10 +1274,10 @@ def regroup(parser, token):
and ``Trumpet``, and ``list`` is the list of musicians who play this
instrument.
- Note that ``{% regroup %}`` does not work when the list to be grouped is not
- sorted by the key you are grouping by! This means that if your list of
- musicians was not sorted by instrument, you'd need to make sure it is sorted
- before using it, i.e.::
+ Note that ``{% regroup %}`` does not work when the list to be grouped is
+ not sorted by the key you are grouping by! This means that if your list of
+ musicians was not sorted by instrument, you'd need to make sure it is
+ sorted before using it, i.e.::
{% regroup musicians|dictsort:"instrument" by instrument as grouped %}
"""
diff --git a/django/template/library.py b/django/template/library.py
index 1a65087e92..3fc4c5ebfc 100644
--- a/django/template/library.py
+++ b/django/template/library.py
@@ -72,7 +72,8 @@ class Library:
# @register.filter
return self.filter_function(name, **flags)
else:
- # @register.filter('somename') or @register.filter(name='somename')
+ # @register.filter('somename') or
+ # @register.filter(name='somename')
def dec(func):
return self.filter(name, func, **flags)
diff --git a/django/template/loader_tags.py b/django/template/loader_tags.py
index c3eb66b577..3a0d054e62 100644
--- a/django/template/loader_tags.py
+++ b/django/template/loader_tags.py
@@ -58,7 +58,8 @@ class BlockNode(Node):
push = block = block_context.pop(self.name)
if block is None:
block = self
- # Create new block so we can store context without thread-safety issues.
+ # Create new block so we can store context without
+ # thread-safety issues.
block = type(self)(block.name, block.nodelist)
block.context = context
context["block"] = block
diff --git a/django/template/smartif.py b/django/template/smartif.py
index da32b38277..f6e8323bed 100644
--- a/django/template/smartif.py
+++ b/django/template/smartif.py
@@ -11,8 +11,8 @@ Parser and utilities for the smart 'if' tag
class TokenBase:
"""
- Base class for operators and literals, mainly for debugging and for throwing
- syntax errors.
+ Base class for operators and literals, mainly for debugging and for
+ throwing syntax errors.
"""
id = None # node/token type name
diff --git a/django/test/selenium.py b/django/test/selenium.py
index be8f4a815f..264ca7f713 100644
--- a/django/test/selenium.py
+++ b/django/test/selenium.py
@@ -29,7 +29,8 @@ class SeleniumTestCaseBase(type(LiveServerTestCase)):
multiple browsers specs are provided (e.g. --selenium=firefox,chrome).
"""
test_class = super().__new__(cls, name, bases, attrs)
- # If the test class is either browser-specific or a test base, return it.
+ # If the test class is either browser-specific or a test base, return
+ # it.
if test_class.browser or not any(
name.startswith("test") and callable(value) for name, value in attrs.items()
):
@@ -62,7 +63,8 @@ class SeleniumTestCaseBase(type(LiveServerTestCase)):
)
setattr(module, browser_test_class.__name__, browser_test_class)
return test_class
- # If no browsers were specified, skip this class (it'll still be discovered).
+ # If no browsers were specified, skip this class (it'll still be
+ # discovered).
return unittest.skip("No browsers specified.")(test_class)
@classmethod
@@ -214,8 +216,8 @@ class SeleniumTestCase(LiveServerTestCase, metaclass=SeleniumTestCaseBase):
if features is not None:
params["features"] = features
- # Not using .execute_cdp_cmd() as it isn't supported by the remote web driver
- # when using --selenium-hub.
+ # Not using .execute_cdp_cmd() as it isn't supported by the remote web
+ # driver when using --selenium-hub.
self.selenium.execute(
driver_command="executeCdpCommand",
params={"cmd": "Emulation.setEmulatedMedia", "params": params},
@@ -242,7 +244,9 @@ class SeleniumTestCase(LiveServerTestCase, metaclass=SeleniumTestCaseBase):
self.selenium.save_screenshot(path)
def get_browser_logs(self, source=None, level="ALL"):
- """Return Chrome console logs filtered by level and optionally source."""
+ """
+ Return Chrome console logs filtered by level and optionally source.
+ """
try:
logs = self.selenium.get_log("browser")
except AttributeError:
diff --git a/django/test/testcases.py b/django/test/testcases.py
index 744303f7a4..5f0c819815 100644
--- a/django/test/testcases.py
+++ b/django/test/testcases.py
@@ -281,7 +281,8 @@ class SimpleTestCase(unittest.TestCase):
# Dynamically created connections are always allowed.
and self.alias in connections
):
- # Connection has not yet been established, but the alias is not allowed.
+ # Connection has not yet been established, but the alias is not
+ # allowed.
message = cls._disallowed_database_msg % {
"test": f"{cls.__module__}.{cls.__qualname__}",
"alias": self.alias,
@@ -1231,9 +1232,9 @@ class TransactionTestCase(SimpleTestCase):
if self._should_reload_connections():
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does a rollback, the effect
- # of these statements is lost, which can affect the operation of
- # tests (e.g., losing a timezone setting causing objects to be
- # created with the wrong time). To make sure this doesn't
+ # of these statements is lost, which can affect the operation
+ # of tests (e.g., losing a timezone setting causing objects to
+ # be created with the wrong time). To make sure this doesn't
# happen, get a clean connection at the start of every test.
for conn in connections.all(initialized_only=True):
conn.close()
@@ -1783,9 +1784,9 @@ class LiveServerTestCase(TransactionTestCase):
framework, such as Selenium for example, instead of the built-in dummy
client.
It inherits from TransactionTestCase instead of TestCase because the
- threads don't share the same transactions (unless if using in-memory sqlite)
- and each thread needs to commit all their transactions so that the other
- thread can see the changes.
+ threads don't share the same transactions (unless if using in-memory
+ sqlite) and each thread needs to commit all their transactions so that the
+ other thread can see the changes.
"""
host = "localhost"
diff --git a/django/test/utils.py b/django/test/utils.py
index 63067c98a6..ea39794e1a 100644
--- a/django/test/utils.py
+++ b/django/test/utils.py
@@ -637,7 +637,8 @@ def compare_xml(want, got):
important. Ignore comment nodes, processing instructions, document type
node, and leading and trailing whitespaces.
- Based on https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
+ Based on
+ https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
"""
_norm_whitespace_re = re.compile(r"[ \t\n][ \t\n]+")
diff --git a/django/urls/resolvers.py b/django/urls/resolvers.py
index 13043835dd..bad5490dcb 100644
--- a/django/urls/resolvers.py
+++ b/django/urls/resolvers.py
@@ -170,8 +170,8 @@ class CheckURLMixin:
Check that the pattern does not begin with a forward slash.
"""
if not settings.APPEND_SLASH:
- # Skip check as it can be useful to start a URL pattern with a slash
- # when APPEND_SLASH=False.
+ # Skip check as it can be useful to start a URL pattern with a
+ # slash when APPEND_SLASH=False.
return []
if self._regex.startswith(("/", "^/", "^\\/")) and not self._regex.endswith(
"/"
@@ -325,7 +325,8 @@ class RoutePattern(CheckURLMixin):
# Only use regex overhead if there are converters.
if self.converters:
if match := self.regex.search(path):
- # RoutePattern doesn't allow non-named groups so args are ignored.
+ # RoutePattern doesn't allow non-named groups so args are
+ # ignored.
kwargs = match.groupdict()
for key, value in kwargs.items():
converter = self.converters[key]
@@ -334,7 +335,8 @@ class RoutePattern(CheckURLMixin):
except ValueError:
return None
return path[match.end() :], (), kwargs
- # If this is an endpoint, the path should be exactly the same as the route.
+ # If this is an endpoint, the path should be exactly the same as the
+ # route.
elif self._is_endpoint:
if self._route == path:
return "", (), {}
@@ -680,11 +682,12 @@ class URLResolver:
if sub_match:
# Merge captured arguments in match with submatch
sub_match_dict = {**kwargs, **self.default_kwargs}
- # Update the sub_match_dict with the kwargs from the sub_match.
+ # Update the sub_match_dict with the kwargs from the
+ # sub_match.
sub_match_dict.update(sub_match.kwargs)
- # If there are *any* named groups, ignore all non-named groups.
- # Otherwise, pass all non-named arguments as positional
- # arguments.
+ # If there are *any* named groups, ignore all non-named
+ # groups. Otherwise, pass all non-named arguments as
+ # positional arguments.
sub_match_args = sub_match.args
if not sub_match_dict:
sub_match_args = args + sub_match.args
diff --git a/django/urls/utils.py b/django/urls/utils.py
index 2bea922917..b5054b163c 100644
--- a/django/urls/utils.py
+++ b/django/urls/utils.py
@@ -10,9 +10,9 @@ def get_callable(lookup_view):
"""
Return a callable corresponding to lookup_view.
* If lookup_view is already a callable, return it.
- * If lookup_view is a string import path that can be resolved to a callable,
- import that callable and return it, otherwise raise an exception
- (ImportError or ViewDoesNotExist).
+ * If lookup_view is a string import path that can be resolved to a
+ callable, import that callable and return it, otherwise raise an
+ exception (ImportError or ViewDoesNotExist).
"""
if callable(lookup_view):
return lookup_view
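Both accepted forms look like this in practice (the dotted path is only an
example target):

    from django.urls.utils import get_callable
    from django.views.generic import TemplateView

    get_callable(TemplateView.as_view())                # already callable: returned as is
    get_callable("django.views.generic.TemplateView")   # imported, then returned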
diff --git a/django/utils/_os.py b/django/utils/_os.py
index e9e1bcbfaf..5cd8c566a8 100644
--- a/django/utils/_os.py
+++ b/django/utils/_os.py
@@ -22,7 +22,8 @@ def safe_join(base, *paths):
# a) The next character is the path separator (to prevent conditions like
# safe_join("/dir", "/../d"))
# b) The final path must be the same as the base path.
- # c) The base path must be the most root path (meaning either "/" or "C:\\")
+ # c) The base path must be the most root path (meaning either "/" or
+ # "C:\\")
if (
not normcase(final_path).startswith(normcase(base_path + sep))
and normcase(final_path) != normcase(base_path)
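Taken together, the three conditions mean the joined path may never escape
base; for example (paths are illustrative):

    from django.utils._os import safe_join

    safe_join("/srv/media", "avatars", "me.png")   # '/srv/media/avatars/me.png'
    safe_join("/srv/media", "../etc/passwd")       # raises SuspiciousFileOperation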
diff --git a/django/utils/autoreload.py b/django/utils/autoreload.py
index a620d0adb7..c6716215f5 100644
--- a/django/utils/autoreload.py
+++ b/django/utils/autoreload.py
@@ -468,8 +468,9 @@ class WatchmanReloader(BaseReloader):
def _subscribe(self, directory, name, expression):
root, rel_path = self._watch_root(directory)
- # Only receive notifications of files changing, filtering out other types
- # like special files: https://facebook.github.io/watchman/docs/type
+ # Only receive notifications of files changing, filtering out other
+ # types like special files:
+ # https://facebook.github.io/watchman/docs/type
only_files_expression = [
"allof",
["anyof", ["type", "f"], ["type", "l"]],
diff --git a/django/utils/cache.py b/django/utils/cache.py
index 3b014fbe51..f2cbd1d033 100644
--- a/django/utils/cache.py
+++ b/django/utils/cache.py
@@ -207,7 +207,8 @@ def get_conditional_response(request, etag=None, last_modified=None, response=No
return _not_modified(request, response)
# Step 5: Test the If-Range precondition (not supported).
- # Step 6: Return original response since there isn't a conditional response.
+ # Step 6: Return original response since there isn't a conditional
+ # response.
return response
diff --git a/django/utils/csp.py b/django/utils/csp.py
index b989a47c23..d57fc98995 100644
--- a/django/utils/csp.py
+++ b/django/utils/csp.py
@@ -9,7 +9,8 @@ class CSP(StrEnum):
Content Security Policy constants for directive values and special tokens.
These constants represent:
- 1. Standard quoted string values from the CSP spec (e.g., 'self', 'unsafe-inline')
+ 1. Standard quoted string values from the CSP spec (e.g., 'self',
+ 'unsafe-inline')
2. Special placeholder tokens (NONCE) that get replaced by the middleware
Using this enum instead of raw strings provides better type checking,
@@ -43,13 +44,15 @@ class CSP(StrEnum):
WASM_UNSAFE_EVAL = "'wasm-unsafe-eval'"
# Special placeholder that gets replaced by the middleware.
- # The value itself is arbitrary and should not be mistaken for a real nonce.
+ # The value itself is arbitrary and should not be mistaken for a real
+ # nonce.
NONCE = ""
class LazyNonce(SimpleLazyObject):
"""
- Lazily generates a cryptographically secure nonce string, for use in CSP headers.
+ Lazily generates a cryptographically secure nonce string, for use in CSP
+ headers.
The nonce is only generated when first accessed (e.g., via string
interpolation or inside a template).
@@ -62,7 +65,8 @@ class LazyNonce(SimpleLazyObject):