Merge remote-tracking branch 'origin/main' into maaddae/ticket_10403

This commit is contained in:
Anthony Addae 2025-10-20 14:12:47 +00:00
commit 463de86bd2
95 changed files with 2050 additions and 277 deletions

View file

@ -29,7 +29,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'docs/requirements.txt'
- run: python -m pip install -r docs/requirements.txt
@ -47,7 +47,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
- run: python -m pip install blacken-docs
- name: Build docs
run: |
@ -68,7 +68,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
- run: python -m pip install sphinx-lint
- name: Build docs
run: |

View file

@ -27,7 +27,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
- run: python -m pip install flake8
- name: flake8
# Pinned to v3.0.0.
@ -44,8 +44,8 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
- run: python -m pip install "isort<6"
python-version: '3.14'
- run: python -m pip install isort
- name: isort
# Pinned to v3.0.0.
uses: liskin/gh-problem-matcher-wrap@e7b7beaaafa52524748b31a381160759d68d61fb

View file

@ -15,12 +15,12 @@ jobs:
# Pin to v1: https://github.com/actions/first-interaction/issues/369
- uses: actions/first-interaction@v1
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
issue_message: |
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-message: |
Hello! Thank you for your interest in Django 💪
Django issues are tracked in [Trac](https://code.djangoproject.com/) and not in this repo.
pr_message: |
pr-message: |
Hello! Thank you for your contribution 💪
As it's your first contribution be sure to check out the [patch review checklist](https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/submitting-patches/#patch-review-checklist).

View file

@ -42,7 +42,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Update apt repo

View file

@ -18,7 +18,7 @@ jobs:
python-version:
- '3.12'
- '3.13'
- '3.14-dev'
- '3.14'
name: Windows, SQLite, Python ${{ matrix.python-version }}
continue-on-error: true
steps:
@ -45,7 +45,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
- name: Install libmemcached-dev for pylibmc
run: sudo apt-get install libmemcached-dev
@ -86,7 +86,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Install libmemcached-dev for pylibmc
@ -122,7 +122,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Install libmemcached-dev for pylibmc
@ -167,7 +167,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Install libmemcached-dev for pylibmc

View file

@ -24,7 +24,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Install and upgrade packaging tools

View file

@ -24,7 +24,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Install libmemcached-dev for pylibmc
@ -61,7 +61,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.13'
python-version: '3.14'
cache: 'pip'
cache-dependency-path: 'tests/requirements/py3.txt'
- name: Install libmemcached-dev for pylibmc

View file

@ -23,7 +23,7 @@ jobs:
strategy:
matrix:
python-version:
- '3.13'
- '3.14'
name: Windows, SQLite, Python ${{ matrix.python-version }}
steps:
- name: Checkout

View file

@ -13,7 +13,7 @@ repos:
files: 'docs/.*\.txt$'
args: ["--rst-literal-block"]
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
rev: 7.0.0
hooks:
- id: isort
- repo: https://github.com/PyCQA/flake8

View file

@ -791,6 +791,7 @@ answer newbie questions, and generally made Django that much better:
Nick Presta <nick@nickpresta.ca>
Nick Sandford <nick.sandford@gmail.com>
Nick Sarbicki <nick.a.sarbicki@gmail.com>
Nick Stefan <https://github.com/nickstefan>
Niclas Olofsson <n@niclasolofsson.se>
Nicola Larosa <nico@teknico.net>
Nicolas Lara <nicolaslara@gmail.com>

View file

@ -357,7 +357,7 @@ body.popup .submit-row {
width: 48em;
}
.flatpages-flatpage #id_content {
.app-flatpages.model-flatpage #id_content {
height: 40.2em;
}

View file

@ -184,8 +184,8 @@ def get_deleted_objects(objs, request, admin_site):
class NestedObjects(Collector):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __init__(self, *args, force_collection=True, **kwargs):
super().__init__(*args, force_collection=force_collection, **kwargs)
self.edges = {} # {from_instance: [to_instances]}
self.protected = set()
self.model_objs = defaultdict(set)
@ -242,13 +242,6 @@ class NestedObjects(Collector):
roots.extend(self._nested(root, seen, format_callback))
return roots
def can_fast_delete(self, *args, **kwargs):
"""
We always want to load the objects into memory so that we can display
them to the user in confirm page.
"""
return False
def model_format_dict(obj):
"""

View file

@ -10,12 +10,14 @@ from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, models, router, transaction
from django.db.models import DO_NOTHING, ForeignObject, ForeignObjectRel
from django.db.models.base import ModelBase, make_foreign_order_accessors
from django.db.models.deletion import DatabaseOnDelete
from django.db.models.fields import Field
from django.db.models.fields.mixins import FieldCacheMixin
from django.db.models.fields.related import (
ReverseManyToOneDescriptor,
lazy_related_operation,
)
from django.db.models.query import prefetch_related_objects
from django.db.models.query_utils import PathInfo
from django.db.models.sql import AND
from django.db.models.sql.where import WhereNode
@ -138,6 +140,16 @@ class GenericForeignKey(FieldCacheMixin, Field):
id="contenttypes.E004",
)
]
elif isinstance(field.remote_field.on_delete, DatabaseOnDelete):
return [
checks.Error(
f"'{self.model._meta.object_name}.{self.ct_field}' cannot use "
"the database-level on_delete variant.",
hint="Change the on_delete rule to the non-database variant.",
obj=self,
id="contenttypes.E006",
)
]
else:
return []
@ -200,11 +212,13 @@ class GenericForeignKeyDescriptor:
for ct_id, fkeys in fk_dict.items():
if ct_id in custom_queryset_dict:
# Return values from the custom queryset, if provided.
ret_val.extend(custom_queryset_dict[ct_id].filter(pk__in=fkeys))
queryset = custom_queryset_dict[ct_id].filter(pk__in=fkeys)
else:
instance = instance_dict[ct_id]
ct = self.field.get_content_type(id=ct_id, using=instance._state.db)
ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))
queryset = ct.get_all_objects_for_this_type(pk__in=fkeys)
ret_val.extend(queryset.fetch_mode(instances[0]._state.fetch_mode))
# For doing the join in Python, we have to match both the FK val and
# the content type, so we use a callable that returns a (fk, class)
@ -253,6 +267,15 @@ class GenericForeignKeyDescriptor:
return rel_obj
else:
rel_obj = None
instance._state.fetch_mode.fetch(self, instance)
return self.field.get_cached_value(instance)
def fetch_one(self, instance):
f = self.field.model._meta.get_field(self.field.ct_field)
ct_id = getattr(instance, f.attname, None)
pk_val = getattr(instance, self.field.fk_field)
rel_obj = None
if ct_id is not None:
ct = self.field.get_content_type(id=ct_id, using=instance._state.db)
try:
@ -261,8 +284,14 @@ class GenericForeignKeyDescriptor:
)
except ObjectDoesNotExist:
pass
else:
rel_obj._state.fetch_mode = instance._state.fetch_mode
self.field.set_cached_value(instance, rel_obj)
return rel_obj
def fetch_many(self, instances):
is_cached = self.field.is_cached
missing_instances = [i for i in instances if not is_cached(i)]
return prefetch_related_objects(missing_instances, self.field.name)
def __set__(self, instance, value):
ct = None
@ -622,7 +651,11 @@ def create_generic_related_manager(superclass, rel):
Filter the queryset for the instance this manager is bound to.
"""
db = self._db or router.db_for_read(self.model, instance=self.instance)
return queryset.using(db).filter(**self.core_filters)
return (
queryset.using(db)
.fetch_mode(self.instance._state.fetch_mode)
.filter(**self.core_filters)
)
def _remove_prefetched_objects(self):
try:

View file

@ -61,7 +61,9 @@ class Command(BaseCommand):
ct_info.append(
" - Content type for %s.%s" % (ct.app_label, ct.model)
)
collector = NoFastDeleteCollector(using=using, origin=ct)
collector = Collector(
using=using, origin=ct, force_collection=True
)
collector.collect([ct])
for obj_type, objs in collector.data.items():
@ -103,11 +105,3 @@ class Command(BaseCommand):
else:
if verbosity >= 2:
self.stdout.write("Stale content types remain.")
class NoFastDeleteCollector(Collector):
def can_fast_delete(self, *args, **kwargs):
"""
Always load related objects to display them when showing confirmation.
"""
return False

View file

@ -132,6 +132,12 @@ class FieldError(Exception):
pass
class FieldFetchBlocked(FieldError):
"""On-demand fetching of a model field blocked."""
pass
NON_FIELD_ERRORS = "__all__"

View file

@ -390,6 +390,9 @@ class BaseDatabaseFeatures:
# subqueries?
supports_tuple_comparison_against_subquery = True
# Does the backend support DEFAULT as delete option?
supports_on_delete_db_default = True
# Collation names for use by the Django test suite.
test_collations = {
"ci": None, # Case-insensitive.

View file

@ -254,6 +254,16 @@ class BaseDatabaseOperations:
if sql
)
def fk_on_delete_sql(self, operation):
"""
Return the SQL to make an ON DELETE statement.
"""
if operation in ["CASCADE", "SET NULL", "SET DEFAULT"]:
return f" ON DELETE {operation}"
if operation == "":
return ""
raise NotImplementedError(f"ON DELETE {operation} is not supported.")
def bulk_insert_sql(self, fields, placeholder_rows):
placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
values_sql = ", ".join([f"({sql})" for sql in placeholder_rows_sql])

View file

@ -121,7 +121,7 @@ class BaseDatabaseSchemaEditor:
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s"
"REFERENCES %(to_table)s (%(to_column)s)%(on_delete_db)s%(deferrable)s"
)
sql_create_inline_fk = None
sql_create_column_inline_fk = None
@ -241,6 +241,7 @@ class BaseDatabaseSchemaEditor:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
"on_delete_db": self._create_on_delete_sql(model, field),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(
@ -759,6 +760,7 @@ class BaseDatabaseSchemaEditor:
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
"deferrable": self.connection.ops.deferrable_sql(),
"on_delete_db": self._create_on_delete_sql(model, field),
}
# Otherwise, add FK constraints later.
else:
@ -1628,6 +1630,13 @@ class BaseDatabaseSchemaEditor:
new_name=self.quote_name(new_name),
)
def _create_on_delete_sql(self, model, field):
remote_field = field.remote_field
try:
return remote_field.on_delete.on_delete_sql(self)
except AttributeError:
return ""
def _index_columns(self, table, columns, col_suffixes, opclasses):
return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes)
@ -1740,6 +1749,7 @@ class BaseDatabaseSchemaEditor:
to_table=to_table,
to_column=to_column,
deferrable=deferrable,
on_delete_db=self._create_on_delete_sql(model, field),
)
def _fk_constraint_name(self, model, field, suffix):

View file

@ -44,6 +44,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
SET V_I = P_I;
END;
"""
supports_on_delete_db_default = False
# Neither MySQL nor MariaDB support partial indexes.
supports_partial_indexes = False
# COLLATE must be wrapped in parentheses because MySQL treats COLLATE as an

View file

@ -14,7 +14,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_column_inline_fk = (
", ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s(%(to_column)s)"
"REFERENCES %(to_table)s(%(to_column)s)%(on_delete_db)s"
)
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"

View file

@ -78,6 +78,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
supports_json_field_contains = False
supports_json_negative_indexing = False
supports_collation_on_textfield = False
supports_on_delete_db_default = False
test_now_utc_template = "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
django_test_expected_failures = {
# A bug in Django/oracledb with respect to string handling (#23843).

View file

@ -20,7 +20,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(on_delete_db)"
"s%(deferrable)s"
)
sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"

View file

@ -1,10 +1,10 @@
from django.db.models.sql.compiler import (
from django.db.models.sql.compiler import ( # isort:skip
SQLAggregateCompiler,
SQLCompiler,
SQLDeleteCompiler,
SQLInsertCompiler as BaseSQLInsertCompiler,
SQLUpdateCompiler,
)
from django.db.models.sql.compiler import SQLInsertCompiler as BaseSQLInsertCompiler
from django.db.models.sql.compiler import SQLUpdateCompiler
__all__ = [
"SQLAggregateCompiler",

View file

@ -28,8 +28,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
# Setting the constraint to IMMEDIATE to allow changing data in the same
# transaction.
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
"; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE"
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(on_delete_db)s"
"%(deferrable)s; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE"
)
# Setting the constraint to IMMEDIATE runs any deferred checks to allow
# dropping it in the same transaction.

View file

@ -10,7 +10,7 @@ from .base import Database
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (3, 31)
minimum_database_version = (3, 37)
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_timezones = False
@ -26,8 +26,6 @@ class DatabaseFeatures(BaseDatabaseFeatures):
time_cast_precision = 3
can_release_savepoints = True
has_case_insensitive_like = True
# Is "ALTER TABLE ... DROP COLUMN" supported?
can_alter_table_drop_column = Database.sqlite_version_info >= (3, 35, 5)
supports_parentheses_in_compound = False
can_defer_constraint_checks = True
supports_over_clause = True
@ -57,6 +55,9 @@ class DatabaseFeatures(BaseDatabaseFeatures):
insert_test_table_with_defaults = 'INSERT INTO {} ("null") VALUES (1)'
supports_default_keyword_in_insert = False
supports_unlimited_charfield = True
can_return_columns_from_insert = True
can_return_rows_from_bulk_insert = True
can_return_rows_from_update = True
@cached_property
def django_test_skips(self):
@ -146,8 +147,8 @@ class DatabaseFeatures(BaseDatabaseFeatures):
"""
SQLite has a variable limit per query. The limit can be changed using
the SQLITE_MAX_VARIABLE_NUMBER compile-time option (which defaults to
999 in versions < 3.32.0 or 32766 in newer versions) or lowered per
connection at run-time with setlimit(SQLITE_LIMIT_VARIABLE_NUMBER, N).
32766) or lowered per connection at run-time with
setlimit(SQLITE_LIMIT_VARIABLE_NUMBER, N).
"""
return self.connection.connection.getlimit(sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER)
@ -163,15 +164,3 @@ class DatabaseFeatures(BaseDatabaseFeatures):
can_introspect_json_field = property(operator.attrgetter("supports_json_field"))
has_json_object_function = property(operator.attrgetter("supports_json_field"))
@cached_property
def can_return_columns_from_insert(self):
return Database.sqlite_version_info >= (3, 35)
can_return_rows_from_bulk_insert = property(
operator.attrgetter("can_return_columns_from_insert")
)
can_return_rows_from_update = property(
operator.attrgetter("can_return_columns_from_insert")
)

View file

@ -342,8 +342,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
"PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name)
)
for row in cursor.fetchall():
# SQLite 3.8.9+ has 5 columns, however older versions only give 3
# columns. Discard last 2 columns if there.
# Discard last 2 columns.
number, index, unique = row[:3]
cursor.execute(
"SELECT sql FROM sqlite_master WHERE type='index' AND name=%s",

View file

@ -13,7 +13,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_table = "DROP TABLE %(table)s"
sql_create_fk = None
sql_create_inline_fk = (
"REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
"REFERENCES %(to_table)s (%(to_column)s)%(on_delete_db)s DEFERRABLE INITIALLY "
"DEFERRED"
)
sql_create_column_inline_fk = sql_create_inline_fk
sql_create_unique = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)"
@ -338,10 +339,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
self.delete_model(field.remote_field.through)
# For explicit "through" M2M fields, do nothing
elif (
self.connection.features.can_alter_table_drop_column
# Primary keys, unique fields, indexed fields, and foreign keys are
# not supported in ALTER TABLE DROP COLUMN.
and not field.primary_key
not field.primary_key
and not field.unique
and not field.db_index
and not (field.remote_field and field.db_constraint)

View file

@ -16,6 +16,7 @@ from django.conf import SettingsReference
from django.db import models
from django.db.migrations.operations.base import Operation
from django.db.migrations.utils import COMPILED_REGEX_TYPE, RegexObject
from django.db.models.deletion import DatabaseOnDelete
from django.utils.functional import LazyObject, Promise
from django.utils.version import get_docs_version
@ -71,6 +72,12 @@ class ChoicesSerializer(BaseSerializer):
return serializer_factory(self.value.value).serialize()
class DatabaseOnDeleteSerializer(BaseSerializer):
def serialize(self):
path = self.value.__class__.__module__
return f"{path}.{self.value.__name__}", {f"import {path}"}
class DateTimeSerializer(BaseSerializer):
"""For datetime.*, except datetime.datetime."""
@ -363,6 +370,7 @@ class Serializer:
pathlib.PurePath: PathSerializer,
os.PathLike: PathLikeSerializer,
zoneinfo.ZoneInfo: ZoneInfoSerializer,
DatabaseOnDelete: DatabaseOnDeleteSerializer,
}
@classmethod

View file

@ -6,6 +6,9 @@ from django.db.models.constraints import * # NOQA
from django.db.models.constraints import __all__ as constraints_all
from django.db.models.deletion import (
CASCADE,
DB_CASCADE,
DB_SET_DEFAULT,
DB_SET_NULL,
DO_NOTHING,
PROTECT,
RESTRICT,
@ -36,6 +39,7 @@ from django.db.models.expressions import (
WindowFrame,
WindowFrameExclusion,
)
from django.db.models.fetch_modes import FETCH_ONE, FETCH_PEERS, RAISE
from django.db.models.fields import * # NOQA
from django.db.models.fields import __all__ as fields_all
from django.db.models.fields.composite import CompositePrimaryKey
@ -74,6 +78,9 @@ __all__ += [
"ObjectDoesNotExist",
"signals",
"CASCADE",
"DB_CASCADE",
"DB_SET_DEFAULT",
"DB_SET_NULL",
"DO_NOTHING",
"PROTECT",
"RESTRICT",
@ -105,6 +112,9 @@ __all__ += [
"GeneratedField",
"JSONField",
"OrderWrt",
"FETCH_ONE",
"FETCH_PEERS",
"RAISE",
"Lookup",
"Transform",
"Manager",

View file

@ -30,8 +30,9 @@ from django.db import (
)
from django.db.models import NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, Collector
from django.db.models.deletion import CASCADE, DO_NOTHING, Collector, DatabaseOnDelete
from django.db.models.expressions import DatabaseDefault
from django.db.models.fetch_modes import FETCH_ONE
from django.db.models.fields.composite import CompositePrimaryKey
from django.db.models.fields.related import (
ForeignObjectRel,
@ -466,6 +467,14 @@ class ModelStateFieldsCacheDescriptor:
return res
class ModelStateFetchModeDescriptor:
def __get__(self, instance, cls=None):
if instance is None:
return self
res = instance.fetch_mode = FETCH_ONE
return res
class ModelState:
"""Store model instance state."""
@ -476,6 +485,14 @@ class ModelState:
# on the actual save.
adding = True
fields_cache = ModelStateFieldsCacheDescriptor()
fetch_mode = ModelStateFetchModeDescriptor()
peers = ()
def __getstate__(self):
state = self.__dict__.copy()
# Weak references can't be pickled.
state.pop("peers", None)
return state
class Model(AltersData, metaclass=ModelBase):
@ -595,7 +612,7 @@ class Model(AltersData, metaclass=ModelBase):
post_init.send(sender=cls, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
def from_db(cls, db, field_names, values, *, fetch_mode=None):
if len(values) != len(cls._meta.concrete_fields):
values_iter = iter(values)
values = [
@ -605,6 +622,8 @@ class Model(AltersData, metaclass=ModelBase):
new = cls(*values)
new._state.adding = False
new._state.db = db
if fetch_mode is not None:
new._state.fetch_mode = fetch_mode
return new
def __repr__(self):
@ -714,8 +733,8 @@ class Model(AltersData, metaclass=ModelBase):
should be an iterable of field attnames. If fields is None, then
all non-deferred fields are reloaded.
When accessing deferred fields of an instance, the deferred loading
of the field will call this method.
When fetching deferred fields for a single instance (the FETCH_ONE
fetch mode), the deferred loading uses this method.
"""
if fields is None:
self._prefetched_objects_cache = {}
@ -1751,6 +1770,7 @@ class Model(AltersData, metaclass=ModelBase):
*cls._check_fields(**kwargs),
*cls._check_m2m_through_same_relationship(),
*cls._check_long_column_names(databases),
*cls._check_related_fields(),
]
clash_errors = (
*cls._check_id_field(),
@ -2436,6 +2456,29 @@ class Model(AltersData, metaclass=ModelBase):
return errors
@classmethod
def _check_related_fields(cls):
has_db_variant = False
has_python_variant = False
for rel in cls._meta.get_fields():
if rel.related_model:
if not (on_delete := getattr(rel.remote_field, "on_delete", None)):
continue
if isinstance(on_delete, DatabaseOnDelete):
has_db_variant = True
elif on_delete != DO_NOTHING:
has_python_variant = True
if has_db_variant and has_python_variant:
return [
checks.Error(
"The model cannot have related fields with both "
"database-level and Python-level on_delete variants.",
obj=cls,
id="models.E050",
)
]
return []
@classmethod
def _get_expr_references(cls, expr):
if isinstance(expr, Q):

View file

@ -81,6 +81,28 @@ def DO_NOTHING(collector, field, sub_objs, using):
pass
class DatabaseOnDelete:
def __init__(self, operation, name, forced_collector=None):
self.operation = operation
self.forced_collector = forced_collector
self.__name__ = name
__call__ = DO_NOTHING
def on_delete_sql(self, schema_editor):
return schema_editor.connection.ops.fk_on_delete_sql(self.operation)
def __str__(self):
return self.__name__
DB_CASCADE = DatabaseOnDelete("CASCADE", "DB_CASCADE", CASCADE)
DB_SET_DEFAULT = DatabaseOnDelete("SET DEFAULT", "DB_SET_DEFAULT")
DB_SET_NULL = DatabaseOnDelete("SET NULL", "DB_SET_NULL")
SKIP_COLLECTION = frozenset([DO_NOTHING, DB_CASCADE, DB_SET_DEFAULT, DB_SET_NULL])
def get_candidate_relations_to_delete(opts):
# The candidate relations are the ones that come from N-1 and 1-1
# relations. N-N (i.e., many-to-many) relations aren't candidates for
@ -93,10 +115,12 @@ def get_candidate_relations_to_delete(opts):
class Collector:
def __init__(self, using, origin=None):
def __init__(self, using, origin=None, force_collection=False):
self.using = using
# A Model or QuerySet object.
self.origin = origin
# Force collecting objects for deletion on the Python-level.
self.force_collection = force_collection
# Initially, {model: {instances}}, later values become lists.
self.data = defaultdict(set)
# {(field, value): [instances, …]}
@ -194,6 +218,8 @@ class Collector:
skipping parent -> child -> parent chain preventing fast delete of
the child.
"""
if self.force_collection:
return False
if from_field and from_field.remote_field.on_delete is not CASCADE:
return False
if hasattr(objs, "_meta"):
@ -215,7 +241,7 @@ class Collector:
and
# Foreign keys pointing to this model.
all(
related.field.remote_field.on_delete is DO_NOTHING
related.field.remote_field.on_delete in SKIP_COLLECTION
for related in get_candidate_relations_to_delete(opts)
)
and (
@ -316,8 +342,13 @@ class Collector:
continue
field = related.field
on_delete = field.remote_field.on_delete
if on_delete == DO_NOTHING:
continue
if on_delete in SKIP_COLLECTION:
if self.force_collection and (
forced_on_delete := getattr(on_delete, "forced_collector", None)
):
on_delete = forced_on_delete
else:
continue
related_model = related.related_model
if self.can_fast_delete(related_model, from_field=field):
model_fast_deletes[related_model].append(field)

View file

@ -0,0 +1,61 @@
from django.core.exceptions import FieldFetchBlocked
class FetchMode:
__slots__ = ()
track_peers = False
def fetch(self, fetcher, instance):
raise NotImplementedError("Subclasses must implement this method.")
class FetchOne(FetchMode):
__slots__ = ()
def fetch(self, fetcher, instance):
fetcher.fetch_one(instance)
def __reduce__(self):
return "FETCH_ONE"
FETCH_ONE = FetchOne()
class FetchPeers(FetchMode):
__slots__ = ()
track_peers = True
def fetch(self, fetcher, instance):
instances = [
peer
for peer_weakref in instance._state.peers
if (peer := peer_weakref()) is not None
]
if len(instances) > 1:
fetcher.fetch_many(instances)
else:
fetcher.fetch_one(instance)
def __reduce__(self):
return "FETCH_PEERS"
FETCH_PEERS = FetchPeers()
class Raise(FetchMode):
__slots__ = ()
def fetch(self, fetcher, instance):
klass = instance.__class__.__qualname__
field_name = fetcher.field.name
raise FieldFetchBlocked(f"Fetching of {klass}.{field_name} blocked.") from None
def __reduce__(self):
return "RAISE"
RAISE = Raise()

View file

@ -155,8 +155,6 @@ class Field(RegisterLookupMixin):
"error_messages",
"help_text",
"limit_choices_to",
# Database-level options are not supported, see #21961.
"on_delete",
"related_name",
"related_query_name",
"validators",

View file

@ -248,7 +248,7 @@ class FileField(Field):
):
self._primary_key_set_explicitly = "primary_key" in kwargs
self.storage = storage or default_storage
self.storage = storage if storage is not None else default_storage
if callable(self.storage):
# Hold a reference to the callable for deconstruct().
self._storage_callable = self.storage

View file

@ -6,11 +6,19 @@ from django import forms
from django.apps import apps
from django.conf import SettingsReference, settings
from django.core import checks, exceptions
from django.db import connection, router
from django.db import connection, connections, router
from django.db.backends import utils
from django.db.models import Q
from django.db.models import NOT_PROVIDED, Q
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, SET_DEFAULT, SET_NULL
from django.db.models.deletion import (
CASCADE,
DB_SET_DEFAULT,
DB_SET_NULL,
DO_NOTHING,
SET_DEFAULT,
SET_NULL,
DatabaseOnDelete,
)
from django.db.models.query_utils import PathInfo
from django.db.models.utils import make_model_tuple
from django.utils.functional import cached_property
@ -1041,18 +1049,21 @@ class ForeignKey(ForeignObject):
return cls
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_on_delete(),
*self._check_on_delete(databases),
*self._check_unique(),
]
def _check_on_delete(self):
def _check_on_delete(self, databases):
on_delete = getattr(self.remote_field, "on_delete", None)
if on_delete == SET_NULL and not self.null:
return [
errors = []
if on_delete in [DB_SET_NULL, SET_NULL] and not self.null:
errors.append(
checks.Error(
"Field specifies on_delete=SET_NULL, but cannot be null.",
f"Field specifies on_delete={on_delete.__name__}, but cannot be "
"null.",
hint=(
"Set null=True argument on the field, or change the on_delete "
"rule."
@ -1060,18 +1071,80 @@ class ForeignKey(ForeignObject):
obj=self,
id="fields.E320",
)
]
)
elif on_delete == SET_DEFAULT and not self.has_default():
return [
errors.append(
checks.Error(
"Field specifies on_delete=SET_DEFAULT, but has no default value.",
hint="Set a default value, or change the on_delete rule.",
obj=self,
id="fields.E321",
)
]
else:
return []
)
elif on_delete == DB_SET_DEFAULT:
if self.db_default is NOT_PROVIDED:
errors.append(
checks.Error(
"Field specifies on_delete=DB_SET_DEFAULT, but has "
"no db_default value.",
hint="Set a db_default value, or change the on_delete rule.",
obj=self,
id="fields.E322",
)
)
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
"supports_on_delete_db_default"
in self.model._meta.required_db_features
or connection.features.supports_on_delete_db_default
):
errors.append(
checks.Error(
f"{connection.display_name} does not support a "
"DB_SET_DEFAULT.",
hint="Change the on_delete rule to SET_DEFAULT.",
obj=self,
id="fields.E324",
),
)
elif not isinstance(self.remote_field.model, str) and on_delete != DO_NOTHING:
# Database and Python variants cannot be mixed in a chain of
# model references.
is_db_on_delete = isinstance(on_delete, DatabaseOnDelete)
ref_model_related_fields = (
ref_model_field.remote_field
for ref_model_field in self.remote_field.model._meta.get_fields()
if ref_model_field.related_model
and hasattr(ref_model_field.remote_field, "on_delete")
)
for ref_remote_field in ref_model_related_fields:
if (
ref_remote_field.on_delete is not None
and ref_remote_field.on_delete != DO_NOTHING
and isinstance(ref_remote_field.on_delete, DatabaseOnDelete)
is not is_db_on_delete
):
on_delete_type = "database" if is_db_on_delete else "Python"
ref_on_delete_type = "Python" if is_db_on_delete else "database"
errors.append(
checks.Error(
f"Field specifies {on_delete_type}-level on_delete "
"variant, but referenced model uses "
f"{ref_on_delete_type}-level variant.",
hint=(
"Use either database or Python on_delete variants "
"uniformly in the references chain."
),
obj=self,
id="fields.E323",
)
)
break
return errors
def _check_unique(self, **kwargs):
return (

View file

@ -78,7 +78,7 @@ from django.db.models.expressions import ColPairs
from django.db.models.fields.tuple_lookups import TupleIn
from django.db.models.functions import RowNumber
from django.db.models.lookups import GreaterThan, LessThanOrEqual
from django.db.models.query import QuerySet
from django.db.models.query import QuerySet, prefetch_related_objects
from django.db.models.query_utils import DeferredAttribute
from django.db.models.utils import AltersData, resolve_callables
from django.utils.functional import cached_property
@ -166,8 +166,10 @@ class ForwardManyToOneDescriptor:
def is_cached(self, instance):
return self.field.is_cached(instance)
def get_queryset(self, **hints):
return self.field.remote_field.model._base_manager.db_manager(hints=hints).all()
def get_queryset(self, *, instance):
return self.field.remote_field.model._base_manager.db_manager(
hints={"instance": instance}
).fetch_mode(instance._state.fetch_mode)
def get_prefetch_querysets(self, instances, querysets=None):
if querysets and len(querysets) != 1:
@ -175,8 +177,9 @@ class ForwardManyToOneDescriptor:
"querysets argument of get_prefetch_querysets() should have a length "
"of 1."
)
queryset = querysets[0] if querysets else self.get_queryset()
queryset._add_hints(instance=instances[0])
queryset = (
querysets[0] if querysets else self.get_queryset(instance=instances[0])
)
rel_obj_attr = self.field.get_foreign_related_value
instance_attr = self.field.get_local_related_value
@ -254,13 +257,9 @@ class ForwardManyToOneDescriptor:
break
if rel_obj is None and has_value:
rel_obj = self.get_object(instance)
remote_field = self.field.remote_field
# If this is a one-to-one relation, set the reverse accessor
# cache on the related object to the current instance to avoid
# an extra SQL query if it's accessed later on.
if not remote_field.multiple:
remote_field.set_cached_value(rel_obj, instance)
instance._state.fetch_mode.fetch(self, instance)
return self.field.get_cached_value(instance)
self.field.set_cached_value(instance, rel_obj)
if rel_obj is None and not self.field.null:
@ -270,6 +269,21 @@ class ForwardManyToOneDescriptor:
else:
return rel_obj
def fetch_one(self, instance):
    """
    Fetch the related object for a single instance and store it in this
    field's cache on that instance.
    """
    related = self.get_object(instance)
    self.field.set_cached_value(instance, related)
    remote = self.field.remote_field
    if remote.multiple:
        return
    # For one-to-one relations, also populate the reverse accessor cache
    # on the fetched object so that accessing it later on won't trigger
    # an extra SQL query.
    remote.set_cached_value(related, instance)
def fetch_many(self, instances):
    """
    Fetch the related objects for all given instances that don't already
    have a cached value, using a single prefetch query.
    """
    uncached = [obj for obj in instances if not self.is_cached(obj)]
    prefetch_related_objects(uncached, self.field.name)
def __set__(self, instance, value):
"""
Set the related instance through the forward relation.
@ -384,6 +398,7 @@ class ForwardOneToOneDescriptor(ForwardManyToOneDescriptor):
obj = rel_model(**kwargs)
obj._state.adding = instance._state.adding
obj._state.db = instance._state.db
obj._state.fetch_mode = instance._state.fetch_mode
return obj
return super().get_object(instance)
@ -445,8 +460,10 @@ class ReverseOneToOneDescriptor:
def is_cached(self, instance):
return self.related.is_cached(instance)
def get_queryset(self, **hints):
return self.related.related_model._base_manager.db_manager(hints=hints).all()
def get_queryset(self, *, instance):
return self.related.related_model._base_manager.db_manager(
hints={"instance": instance}
).fetch_mode(instance._state.fetch_mode)
def get_prefetch_querysets(self, instances, querysets=None):
if querysets and len(querysets) != 1:
@ -454,8 +471,9 @@ class ReverseOneToOneDescriptor:
"querysets argument of get_prefetch_querysets() should have a length "
"of 1."
)
queryset = querysets[0] if querysets else self.get_queryset()
queryset._add_hints(instance=instances[0])
queryset = (
querysets[0] if querysets else self.get_queryset(instance=instances[0])
)
rel_obj_attr = self.related.field.get_local_related_value
instance_attr = self.related.field.get_foreign_related_value
@ -504,16 +522,8 @@ class ReverseOneToOneDescriptor:
if not instance._is_pk_set():
rel_obj = None
else:
filter_args = self.related.field.get_forward_related_filter(instance)
try:
rel_obj = self.get_queryset(instance=instance).get(**filter_args)
except self.related.related_model.DoesNotExist:
rel_obj = None
else:
# Set the forward accessor cache on the related object to
# the current instance to avoid an extra SQL query if it's
# accessed later on.
self.related.field.set_cached_value(rel_obj, instance)
instance._state.fetch_mode.fetch(self, instance)
rel_obj = self.related.get_cached_value(instance)
self.related.set_cached_value(instance, rel_obj)
if rel_obj is None:
@ -524,6 +534,34 @@ class ReverseOneToOneDescriptor:
else:
return rel_obj
@property
def field(self):
    """
    Expose ``self.related`` under the name ``field`` for compatibility
    with the fetcher protocol. While ``self.related`` is not a field but
    a ``OneToOneRel``, it quacks enough like a field to work.
    """
    return self.related
def fetch_one(self, instance):
    """
    Query for the reverse one-to-one object of ``instance`` and cache the
    result (``None`` when no matching row exists) on the instance.
    """
    # get_forward_related_filter() is used here for backwards
    # compatibility with subclasses that override it.
    filters = self.related.field.get_forward_related_filter(instance)
    queryset = self.get_queryset(instance=instance)
    try:
        obj = queryset.get(**filters)
    except self.related.related_model.DoesNotExist:
        obj = None
    else:
        # Populate the forward accessor cache on the related object to
        # avoid an extra SQL query if it's accessed later on.
        self.related.field.set_cached_value(obj, instance)
    self.related.set_cached_value(instance, obj)
def fetch_many(self, instances):
    """
    Fetch the reverse one-to-one objects for all given instances that
    don't already have a cached value, in a single prefetch query.
    """
    uncached = [obj for obj in instances if not self.is_cached(obj)]
    prefetch_related_objects(uncached, self.related.get_accessor_name())
def __set__(self, instance, value):
"""
Set the related instance through the reverse relation.
@ -703,6 +741,7 @@ def create_reverse_many_to_one_manager(superclass, rel):
queryset._add_hints(instance=self.instance)
if self._db:
queryset = queryset.using(self._db)
queryset._fetch_mode = self.instance._state.fetch_mode
queryset._defer_next_filter = True
queryset = queryset.filter(**self.core_filters)
for field in self.field.foreign_related_fields:
@ -1104,6 +1143,7 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
queryset._add_hints(instance=self.instance)
if self._db:
queryset = queryset.using(self._db)
queryset._fetch_mode = self.instance._state.fetch_mode
queryset._defer_next_filter = True
return queryset._next_is_sticky().filter(**self.core_filters)

View file

@ -8,6 +8,7 @@ import warnings
from contextlib import nullcontext
from functools import reduce
from itertools import chain, islice
from weakref import ref as weak_ref
from asgiref.sync import sync_to_async
@ -26,6 +27,7 @@ from django.db.models import AutoField, DateField, DateTimeField, Field, Max, sq
from django.db.models.constants import LOOKUP_SEP, OnConflict
from django.db.models.deletion import Collector
from django.db.models.expressions import Case, DatabaseDefault, F, Value, When
from django.db.models.fetch_modes import FETCH_ONE
from django.db.models.functions import Cast, Trunc
from django.db.models.query_utils import FilteredRelation, Q
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE, ROW_COUNT
@ -88,6 +90,7 @@ class ModelIterable(BaseIterable):
queryset = self.queryset
db = queryset.db
compiler = queryset.query.get_compiler(using=db)
fetch_mode = queryset._fetch_mode
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
results = compiler.execute_sql(
@ -104,7 +107,7 @@ class ModelIterable(BaseIterable):
init_list = [
f[0].target.attname for f in select[model_fields_start:model_fields_end]
]
related_populators = get_related_populators(klass_info, select, db)
related_populators = get_related_populators(klass_info, select, db, fetch_mode)
known_related_objects = [
(
field,
@ -122,10 +125,17 @@ class ModelIterable(BaseIterable):
)
for field, related_objs in queryset._known_related_objects.items()
]
peers = []
for row in compiler.results_iter(results):
obj = model_cls.from_db(
db, init_list, row[model_fields_start:model_fields_end]
db,
init_list,
row[model_fields_start:model_fields_end],
fetch_mode=fetch_mode,
)
if fetch_mode.track_peers:
peers.append(weak_ref(obj))
obj._state.peers = peers
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
@ -183,10 +193,17 @@ class RawModelIterable(BaseIterable):
query_iterator = compiler.composite_fields_to_tuples(
query_iterator, cols
)
fetch_mode = self.queryset._fetch_mode
peers = []
for values in query_iterator:
# Associate fields to values
model_init_values = [values[pos] for pos in model_init_pos]
instance = model_cls.from_db(db, model_init_names, model_init_values)
instance = model_cls.from_db(
db, model_init_names, model_init_values, fetch_mode=fetch_mode
)
if fetch_mode.track_peers:
peers.append(weak_ref(instance))
instance._state.peers = peers
if annotation_fields:
for column, pos in annotation_fields:
setattr(instance, column, values[pos])
@ -293,6 +310,7 @@ class QuerySet(AltersData):
self._prefetch_done = False
self._known_related_objects = {} # {rel_field: {pk: rel_obj}}
self._iterable_class = ModelIterable
self._fetch_mode = FETCH_ONE
self._fields = None
self._defer_next_filter = False
self._deferred_filter = None
@ -665,6 +683,7 @@ class QuerySet(AltersData):
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
obj._state.fetch_mode = self._fetch_mode
return obj
create.alters_data = True
@ -1442,6 +1461,7 @@ class QuerySet(AltersData):
params=params,
translations=translations,
using=using,
fetch_mode=self._fetch_mode,
)
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
return qs
@ -1913,6 +1933,12 @@ class QuerySet(AltersData):
clone._db = alias
return clone
def fetch_mode(self, fetch_mode):
    """Return a clone of the QuerySet using the given fetch mode."""
    qs = self._chain()
    qs._fetch_mode = fetch_mode
    return qs
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
@ -2051,6 +2077,7 @@ class QuerySet(AltersData):
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
c._known_related_objects = self._known_related_objects
c._iterable_class = self._iterable_class
c._fetch_mode = self._fetch_mode
c._fields = self._fields
return c
@ -2141,8 +2168,14 @@ class QuerySet(AltersData):
raise TypeError(f"Cannot use {operator_} operator with combined queryset.")
def _check_ordering_first_last_queryset_aggregation(self, method):
if isinstance(self.query.group_by, tuple) and not any(
col.output_field is self.model._meta.pk for col in self.query.group_by
if (
isinstance(self.query.group_by, tuple)
# Raise if the pk fields are not in the group_by.
and self.model._meta.pk
not in {col.output_field for col in self.query.group_by}
and set(self.model._meta.pk_fields).difference(
{col.target for col in self.query.group_by}
)
):
raise TypeError(
f"Cannot use QuerySet.{method}() on an unordered queryset performing "
@ -2180,6 +2213,7 @@ class RawQuerySet:
translations=None,
using=None,
hints=None,
fetch_mode=FETCH_ONE,
):
self.raw_query = raw_query
self.model = model
@ -2191,6 +2225,7 @@ class RawQuerySet:
self._result_cache = None
self._prefetch_related_lookups = ()
self._prefetch_done = False
self._fetch_mode = fetch_mode
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
@ -2289,6 +2324,7 @@ class RawQuerySet:
params=self.params,
translations=self.translations,
using=alias,
fetch_mode=self._fetch_mode,
)
@cached_property
@ -2752,8 +2788,9 @@ class RelatedPopulator:
model instance.
"""
def __init__(self, klass_info, select, db):
def __init__(self, klass_info, select, db, fetch_mode):
self.db = db
self.fetch_mode = fetch_mode
# Pre-compute needed attributes. The attributes are:
# - model_cls: the possibly deferred model class to instantiate
# - either:
@ -2806,7 +2843,9 @@ class RelatedPopulator:
# relationship. Therefore checking for a single member of the primary
# key is enough to determine if the referenced object exists or not.
self.pk_idx = self.init_list.index(self.model_cls._meta.pk_fields[0].attname)
self.related_populators = get_related_populators(klass_info, select, self.db)
self.related_populators = get_related_populators(
klass_info, select, self.db, fetch_mode
)
self.local_setter = klass_info["local_setter"]
self.remote_setter = klass_info["remote_setter"]
@ -2818,7 +2857,12 @@ class RelatedPopulator:
if obj_data[self.pk_idx] is None:
obj = None
else:
obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
obj = self.model_cls.from_db(
self.db,
self.init_list,
obj_data,
fetch_mode=self.fetch_mode,
)
for rel_iter in self.related_populators:
rel_iter.populate(row, obj)
self.local_setter(from_obj, obj)
@ -2826,10 +2870,10 @@ class RelatedPopulator:
self.remote_setter(obj, from_obj)
def get_related_populators(klass_info, select, db):
def get_related_populators(klass_info, select, db, fetch_mode):
iterators = []
related_klass_infos = klass_info.get("related_klass_infos", [])
for rel_klass_info in related_klass_infos:
rel_cls = RelatedPopulator(rel_klass_info, select, db)
rel_cls = RelatedPopulator(rel_klass_info, select, db, fetch_mode)
iterators.append(rel_cls)
return iterators

View file

@ -264,7 +264,8 @@ class DeferredAttribute:
f"Cannot retrieve deferred field {field_name!r} "
"from an unsaved model."
)
instance.refresh_from_db(fields=[field_name])
instance._state.fetch_mode.fetch(self, instance)
else:
data[field_name] = val
return data[field_name]
@ -281,6 +282,20 @@ class DeferredAttribute:
return getattr(instance, link_field.attname)
return None
def fetch_one(self, instance):
    """Reload this deferred field for a single instance from the database."""
    instance.refresh_from_db(fields=[self.field.attname])
def fetch_many(self, instances):
    """
    Load this deferred field for all given instances with one query.

    Build a mapping from primary key to the field's value on the
    instances' database, then assign each value onto the corresponding
    instance's attribute.
    """
    attname = self.field.attname
    # NOTE(review): assumes all instances came from the same database —
    # only the first instance's db alias is consulted.
    db = instances[0]._state.db
    value_by_pk = (
        self.field.model._base_manager.using(db)
        .values_list(attname)
        .in_bulk({i.pk for i in instances})
    )
    for instance in instances:
        # NOTE(review): raises KeyError if a row was deleted between the
        # original fetch and this query — presumably acceptable here;
        # confirm against callers.
        setattr(instance, attname, value_by_pk[instance.pk])
class class_or_instance_method:
"""

View file

@ -53,8 +53,8 @@ Django version Python versions
4.2 3.8, 3.9, 3.10, 3.11, 3.12 (added in 4.2.8)
5.0 3.10, 3.11, 3.12
5.1 3.10, 3.11, 3.12, 3.13 (added in 5.1.3)
5.2 3.10, 3.11, 3.12, 3.13
6.0 3.12, 3.13
5.2 3.10, 3.11, 3.12, 3.13, 3.14 (added in 5.2.8)
6.0 3.12, 3.13, 3.14
6.1 3.12, 3.13, 3.14
============== ===============

View file

@ -2,7 +2,7 @@
How to install Django on Windows
================================
This document will guide you through installing Python 3.13 and Django on
This document will guide you through installing Python 3.14 and Django on
Windows. It also provides instructions for setting up a virtual environment,
which makes it easier to work on Python projects. This is meant as a beginner's
guide for users working on Django projects and does not reflect how Django
@ -18,7 +18,7 @@ Install Python
==============
Django is a Python web framework, thus requiring Python to be installed on your
machine. At the time of writing, Python 3.13 is the latest version.
machine. At the time of writing, Python 3.14 is the latest version.
To install Python on your machine go to https://www.python.org/downloads/. The
website should offer you a download button for the latest Python version.

View file

@ -137,7 +137,7 @@ Imports
.. console::
$ python -m pip install "isort >= 5.1.0"
$ python -m pip install "isort >= 7.0.0"
$ isort .
This runs ``isort`` recursively from your current directory, modifying any

View file

@ -83,7 +83,7 @@ environments can be seen as follows:
blacken-docs
flake8>=3.7.0
docs
isort>=5.1.0
isort>=7.0.0
lint-docs
Testing other Python versions and database backends
@ -317,7 +317,7 @@ dependencies:
* :pypi:`asgiref` 3.9.1+ (required)
* :pypi:`bcrypt` 4.1.1+
* :pypi:`colorama` 0.4.6+
* :pypi:`docutils` 0.19+
* :pypi:`docutils` 0.22+
* :pypi:`geoip2` 4.8.0+
* :pypi:`Jinja2` 2.11+
* :pypi:`numpy` 1.26.0+

View file

@ -235,6 +235,7 @@ this. For a small app like polls, this process isn't too difficult.
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
]

View file

@ -299,9 +299,15 @@ Related fields
referenced by a ``ForeignKey``.
* **fields.E312**: The ``to_field`` ``<field name>`` doesn't exist on the
related model ``<app label>.<model>``.
* **fields.E320**: Field specifies ``on_delete=SET_NULL``, but cannot be null.
* **fields.E321**: The field specifies ``on_delete=SET_DEFAULT``, but has no
default value.
* **fields.E320**: Field specifies ``on_delete=<set null option>``, but cannot
be null.
* **fields.E321**: Field specifies ``on_delete=SET_DEFAULT``, but has no
``default`` value.
* **fields.E322**: Field specifies ``on_delete=DB_SET_DEFAULT``, but has no
``db_default`` value.
* **fields.E323**: Field specifies database/Python-level on_delete variant, but
referenced model uses Python/database-level variant.
* **fields.E324**: ``<database>`` does not support ``DB_SET_DEFAULT``.
* **fields.E330**: ``ManyToManyField``\s cannot be unique.
* **fields.E331**: Field specifies a many-to-many relation through model
``<model>``, which has not been installed.
@ -446,6 +452,8 @@ Models
* **models.E049**: ``constraints/indexes/unique_together`` refers to a
``ForeignObject`` ``<field name>`` with multiple ``from_fields``, which is
not supported for that option.
* **models.E050**: The model cannot have related fields with both
database-level and Python-level ``on_delete`` variants.
Management Commands
-------------------
@ -921,6 +929,8 @@ The following checks are performed when a model contains a
* **contenttypes.E004**: ``<field>`` is not a ``ForeignKey`` to
``contenttypes.ContentType``.
* **contenttypes.E005**: Model names must be at most 100 characters.
* **contenttypes.E006**: ``<field>`` cannot use the database-level
``on_delete`` variant.
``postgres``
------------

View file

@ -23,7 +23,7 @@ the following:
your ``urlpatterns``. Make sure it's included *before* the
``'admin/'`` entry, so that requests to ``/admin/doc/`` don't get
handled by the latter entry.
* Install the :pypi:`docutils` 0.19+ package.
* Install the :pypi:`docutils` 0.22+ package.
* **Optional:** Using the admindocs bookmarklets requires
``django.contrib.admindocs.middleware.XViewMiddleware`` to be installed.

View file

@ -60,7 +60,7 @@ Database Library Requirements Supported Versions Notes
PostgreSQL GEOS, GDAL, PROJ, PostGIS 15+ Requires PostGIS.
MySQL GEOS, GDAL 8.0.11+ :ref:`Limited functionality <mysql-spatial-limitations>`.
Oracle GEOS, GDAL 19+ XE not supported.
SQLite GEOS, GDAL, PROJ, SpatiaLite 3.31.0+ Requires SpatiaLite 4.3+
SQLite GEOS, GDAL, PROJ, SpatiaLite 3.37.0+ Requires SpatiaLite 4.3+
================== ============================== ================== =========================================
See also `this comparison matrix`__ on the OSGeo Wiki for

View file

@ -814,7 +814,7 @@ appropriate typecasting.
SQLite notes
============
Django supports SQLite 3.31.0 and later.
Django supports SQLite 3.37.0 and later.
SQLite_ provides an excellent development alternative for applications that
are predominantly read-only or require a smaller installation footprint. As

View file

@ -165,6 +165,16 @@ Django core exception classes are defined in ``django.core.exceptions``.
- A field name is invalid
- A query contains invalid order_by arguments
``FieldFetchBlocked``
---------------------
.. versionadded:: 6.1
.. exception:: FieldFetchBlocked
Raised when a field would be fetched on-demand and the
:attr:`~django.db.models.RAISE` fetch mode is active.
``ValidationError``
-------------------

View file

@ -182,8 +182,8 @@ Slicing ``F()`` expressions
For string-based fields, text-based fields, and
:class:`~django.contrib.postgres.fields.ArrayField`, you can use Python's
array-slicing syntax. The indices are 0-based and the ``step`` argument to
``slice`` is not supported. For example:
array-slicing syntax. The indices are 0-based. The ``step`` argument to
``slice`` and negative indexing are not supported. For example:
.. code-block:: pycon

View file

@ -1699,11 +1699,11 @@ relation works.
.. attribute:: ForeignKey.on_delete
When an object referenced by a :class:`ForeignKey` is deleted, Django will
emulate the behavior of the SQL constraint specified by the
:attr:`on_delete` argument. For example, if you have a nullable
:class:`ForeignKey` and you want it to be set null when the referenced
object is deleted::
When an object referenced by a :class:`ForeignKey` is deleted, the
referring objects need updating. The :attr:`on_delete` argument specifies
how this is done, and whether Django or your database makes the updates.
For example, if you have a nullable :class:`ForeignKey` and you want Django
to set it to ``None`` when the referenced object is deleted::
user = models.ForeignKey(
User,
@ -1712,8 +1712,21 @@ relation works.
null=True,
)
``on_delete`` doesn't create an SQL constraint in the database. Support for
database-level cascade options :ticket:`may be implemented later <21961>`.
The possible values for :attr:`~ForeignKey.on_delete` are listed below.
Import them from :mod:`django.db.models`. The ``DB_*`` variants use the
database to prevent deletions or update referring objects, whilst the other
values make Django perform the relevant actions.
The database variants are more efficient because they avoid fetching
related objects, but ``pre_delete`` and ``post_delete`` signals won't be
sent when ``DB_CASCADE`` is used.
The database variants cannot be mixed with Python variants (other than
:attr:`DO_NOTHING`) in the same model and in models related to each other.
.. versionchanged:: 6.1
Support for ``DB_*`` variants of the ``on_delete`` attribute was added.
The possible values for :attr:`~ForeignKey.on_delete` are found in
:mod:`django.db.models`:
@ -1729,6 +1742,13 @@ The possible values for :attr:`~ForeignKey.on_delete` are found in
:data:`~django.db.models.signals.post_delete` signals are sent for all
deleted objects.
* .. attribute:: DB_CASCADE
.. versionadded:: 6.1
Cascade deletes. Database-level version of :attr:`CASCADE`: the database
deletes referred-to rows and the one containing the ``ForeignKey``.
* .. attribute:: PROTECT
Prevent deletion of the referenced object by raising
@ -1782,11 +1802,30 @@ The possible values for :attr:`~ForeignKey.on_delete` are found in
Set the :class:`ForeignKey` null; this is only possible if
:attr:`~Field.null` is ``True``.
* .. attribute:: DB_SET_NULL
.. versionadded:: 6.1
Set the :class:`ForeignKey` value to ``NULL``. This is only possible if
:attr:`~Field.null` is ``True``. Database-level version of
:attr:`SET_NULL`.
* .. attribute:: SET_DEFAULT
Set the :class:`ForeignKey` to its default value; a default for the
:class:`ForeignKey` must be set.
* .. attribute:: DB_SET_DEFAULT
.. versionadded:: 6.1
Set the :class:`ForeignKey` value to its :attr:`Field.db_default` value,
which must be set. If a row in the referenced table is deleted, the foreign
key values in the referencing table will be updated to their
:attr:`Field.db_default` values.
``DB_SET_DEFAULT`` is not supported on MySQL and MariaDB.
* .. function:: SET()
Set the :class:`ForeignKey` to the value passed to

View file

@ -180,10 +180,10 @@ update, you could write a test similar to this::
obj.refresh_from_db()
self.assertEqual(obj.val, 2)
Note that when deferred fields are accessed, the loading of the deferred
field's value happens through this method. Thus it is possible to customize
the way deferred loading happens. The example below shows how one can reload
all of the instance's fields when a deferred field is reloaded::
When a deferred field is loaded on-demand for a single model instance, the
loading happens through this method. Thus it is possible to customize the way
this loading happens. The example below shows how one can reload all of the
instance's fields when a deferred field is loaded on-demand::
class ExampleModel(models.Model):
def refresh_from_db(self, using=None, fields=None, **kwargs):
@ -695,7 +695,11 @@ Issues an SQL ``DELETE`` for the object. This only deletes the object in the
database; the Python instance will still exist and will still have data in
its fields, except for the primary key set to ``None``. This method returns the
number of objects deleted and a dictionary with the number of deletions per
object type.
object type. The return value will count instances from related models if
Django is emulating cascade behavior via Python :attr:`~ForeignKey.on_delete`
variants. Otherwise, for database variants such as
:attr:`~django.db.models.DB_CASCADE`, the return value will report only
instances of the :class:`.QuerySet`'s model.
For more details, including how to delete objects in bulk, see
:ref:`topics-db-queries-delete`.
@ -707,6 +711,10 @@ Sometimes with :ref:`multi-table inheritance <multi-table-inheritance>` you may
want to delete only a child model's data. Specifying ``keep_parents=True`` will
keep the parent model's data.
.. versionchanged:: 6.1
Support for the ``DB_*`` variants of the ``on_delete`` attribute was added.
Pickling objects
================

View file

@ -1022,15 +1022,38 @@ Uses SQL's ``EXCEPT`` operator to keep only elements present in the
See :meth:`union` for some restrictions.
``fetch_mode()``
~~~~~~~~~~~~~~~~
.. versionadded:: 6.1
.. method:: fetch_mode(mode)
Returns a ``QuerySet`` that sets the given fetch mode for all model instances
created by this ``QuerySet``. The fetch mode controls on-demand loading of
fields when they are accessed, such as for foreign keys and deferred fields.
For example, to use the :attr:`~django.db.models.FETCH_PEERS` mode to
batch-load all related objects on first access:
.. code-block:: python
from django.db import models
books = Book.objects.fetch_mode(models.FETCH_PEERS)
See more in the :doc:`fetch mode topic guide </topics/db/fetch-modes>`.
``select_related()``
~~~~~~~~~~~~~~~~~~~~
.. method:: select_related(*fields)
Returns a ``QuerySet`` that will "follow" foreign-key relationships, selecting
additional related-object data when it executes its query. This is a
performance booster which results in a single more complex query but means
later use of foreign-key relationships won't require database queries.
Returns a ``QuerySet`` that will join in the named foreign-key relationships,
selecting additional related objects when it executes its query. This method
can be a performance booster, fetching data ahead of time rather than
triggering on-demand loading through the model instances'
:doc:`fetch mode </topics/db/fetch-modes>`, at the cost of a more complex
initial query.
The following examples illustrate the difference between plain lookups and
``select_related()`` lookups. Here's standard lookup::
@ -1050,20 +1073,8 @@ And here's ``select_related`` lookup::
# in the previous query.
b = e.blog
You can use ``select_related()`` with any queryset of objects::
from django.utils import timezone
# Find all the blogs with entries scheduled to be published in the future.
blogs = set()
for e in Entry.objects.filter(pub_date__gt=timezone.now()).select_related("blog"):
# Without select_related(), this would make a database query for each
# loop iteration in order to fetch the related blog for each entry.
blogs.add(e.blog)
The order of ``filter()`` and ``select_related()`` chaining isn't important.
These querysets are equivalent::
You can use ``select_related()`` with any queryset. The order of chaining with
other methods isn't important. For example, these querysets are equivalent::
Entry.objects.filter(pub_date__gt=timezone.now()).select_related("blog")
Entry.objects.select_related("blog").filter(pub_date__gt=timezone.now())
@ -1141,12 +1152,15 @@ that is that ``select_related('foo', 'bar')`` is equivalent to
.. method:: prefetch_related(*lookups)
Returns a ``QuerySet`` that will automatically retrieve, in a single batch,
related objects for each of the specified lookups.
Returns a ``QuerySet`` that will automatically retrieve the given lookups, each
in one extra batch query. Prefetching is a way to optimize database access
when you know you'll be accessing related objects later, so you can avoid
triggering the on-demand loading behavior of the model instances'
:doc:`fetch mode </topics/db/fetch-modes>`.
This has a similar purpose to ``select_related``, in that both are designed to
stop the deluge of database queries that is caused by accessing related
objects, but the strategy is quite different.
This method has a similar purpose to :meth:`select_related`, in that both are
designed to eagerly fetch related objects. However, they work in different
ways.
``select_related`` works by creating an SQL join and including the fields of
the related object in the ``SELECT`` statement. For this reason,
@ -2425,8 +2439,8 @@ This has a number of caveats though:
* If the model's primary key is an :class:`~django.db.models.AutoField` or has
a :attr:`~django.db.models.Field.db_default` value, and ``ignore_conflicts``
is ``False``, the primary key attribute can only be retrieved on certain
databases (currently PostgreSQL, MariaDB, and SQLite 3.35+). On other
databases, it will not be set.
databases (currently PostgreSQL, MariaDB, and SQLite). On other databases, it
will not be set.
* It does not work with many-to-many relationships.
* It casts ``objs`` to a list, which fully evaluates ``objs`` if it's a
generator. The cast allows inspecting all objects so that any objects with a
@ -3022,7 +3036,11 @@ unique field in the order that is specified without conflicts. For example::
Performs an SQL delete query on all rows in the :class:`.QuerySet` and
returns the number of objects deleted and a dictionary with the number of
deletions per object type.
deletions per object type. The return value will count instances from related
models if Django is emulating cascade behavior via Python
:attr:`~django.db.models.ForeignKey.on_delete` variants. Otherwise, for
database variants such as :attr:`~django.db.models.DB_CASCADE`, the return
value will report only instances of the :class:`.QuerySet`'s model.
The ``delete()`` is applied instantly. You cannot call ``delete()`` on a
:class:`.QuerySet` that has had a slice taken or can otherwise no longer be
@ -3059,13 +3077,20 @@ The ``delete()`` method does a bulk delete and does not call any ``delete()``
methods on your models. It does, however, emit the
:data:`~django.db.models.signals.pre_delete` and
:data:`~django.db.models.signals.post_delete` signals for all deleted objects
(including cascaded deletions).
(including cascaded deletions). Signals won't be sent when ``DB_CASCADE`` is
used. Also, ``delete()`` doesn't return information about objects deleted from
database variants (``DB_*``) of the
:attr:`~django.db.models.ForeignKey.on_delete` argument, e.g. ``DB_CASCADE``.
Django needs to fetch objects into memory to send signals and handle cascades.
However, if there are no cascades and no signals, then Django may take a
fast-path and delete objects without fetching into memory. For large
deletes this can result in significantly reduced memory usage. The amount of
executed queries can be reduced, too.
Django won't need to fetch objects into memory when deleting them in the
following cases:
#. If related fields use ``DB_*`` options.
#. If there are no cascades and no delete signal receivers.
In these cases, Django may take a fast path and delete objects without fetching
them, which can result in significantly reduced memory usage and fewer executed
queries.
ForeignKeys which are set to :attr:`~django.db.models.ForeignKey.on_delete`
``DO_NOTHING`` do not prevent taking the fast-path in deletion.
@ -3073,6 +3098,10 @@ ForeignKeys which are set to :attr:`~django.db.models.ForeignKey.on_delete`
Note that the queries generated in object deletion is an implementation
detail subject to change.
.. versionchanged:: 6.1
Support for the ``DB_*`` variants of the ``on_delete`` attribute was added.
``as_manager()``
~~~~~~~~~~~~~~~~

View file

@ -4,9 +4,14 @@ Django 5.2.8 release notes
*Expected November 5, 2025*
Django 5.2.8 fixes several bugs in 5.2.7.
Django 5.2.8 fixes several bugs in 5.2.7 and adds compatibility with Python
3.14.
Bugfixes
========
* Added compatibility for ``oracledb`` 3.4.0 (:ticket:`36646`).
* Fixed a bug in Django 5.2 where ``QuerySet.first()`` and ``QuerySet.last()``
raised an error on querysets performing aggregation that selected all fields
of a composite primary key.

View file

@ -23,8 +23,9 @@ end in April 2026.
Python compatibility
====================
Django 5.2 supports Python 3.10, 3.11, 3.12, and 3.13. We **highly recommend**
and only officially support the latest release of each series.
Django 5.2 supports Python 3.10, 3.11, 3.12, 3.13, and 3.14 (as of 5.2.8). We
**highly recommend** and only officially support the latest release of each
series.
.. _whats-new-5.2:

View file

@ -18,8 +18,8 @@ project.
Python compatibility
====================
Django 6.0 supports Python 3.12 and 3.13. We **highly recommend**, and only
officially support, the latest release of each series.
Django 6.0 supports Python 3.12, 3.13, and 3.14. We **highly recommend**, and
only officially support, the latest release of each series.
The Django 5.2.x series is the last to support Python 3.10 and 3.11.
@ -377,6 +377,7 @@ of each library are the first to add or confirm compatibility with Python 3.12:
* ``aiosmtpd`` 1.4.5
* ``argon2-cffi`` 23.1.0
* ``bcrypt`` 4.1.1
* ``docutils`` 0.22
* ``geoip2`` 4.8.0
* ``Pillow`` 10.1.0
* ``mysqlclient`` 2.2.1

View file

@ -26,6 +26,66 @@ only officially support, the latest release of each series.
What's new in Django 6.1
========================
Model field fetch modes
-----------------------
The on-demand fetching behavior of model fields is now configurable with
:doc:`fetch modes </topics/db/fetch-modes>`. These modes allow you to control
how Django fetches data from the database when an unfetched field is accessed.
Django provides three fetch modes:
1. ``FETCH_ONE``, the default, fetches the missing field for the current
instance only. This mode represents Django's existing behavior.
2. ``FETCH_PEERS`` fetches a missing field for all instances that came from
the same :class:`~django.db.models.query.QuerySet`.
This mode works like an on-demand ``prefetch_related()``. It can reduce most
cases of the "N+1 queries problem" to two queries without any work to
maintain a list of fields to prefetch.
3. ``RAISE`` raises a :exc:`~django.core.exceptions.FieldFetchBlocked`
exception.
This mode can prevent unintentional queries in performance-critical
sections of code.
Use the new method :meth:`.QuerySet.fetch_mode` to set the fetch mode for model
instances fetched by the ``QuerySet``:
.. code-block:: python
from django.db import models
books = Book.objects.fetch_mode(models.FETCH_PEERS)
for book in books:
print(book.author.name)
Despite the loop accessing the ``author`` foreign key on each instance, the
``FETCH_PEERS`` fetch mode will make the above example perform only two
queries:
1. Fetch all books.
2. Fetch associated authors.
See :doc:`fetch modes </topics/db/fetch-modes>` for more details.
Database-level delete options for ``ForeignKey.on_delete``
----------------------------------------------------------
:attr:`.ForeignKey.on_delete` now supports database-level delete options:
* :attr:`~django.db.models.DB_CASCADE`
* :attr:`~django.db.models.DB_SET_NULL`
* :attr:`~django.db.models.DB_SET_DEFAULT`
These options handle deletion logic entirely within the database, using the SQL
``ON DELETE`` clause. They are thus more efficient than the existing
Python-level options, as Django does not need to load objects before deleting
them. As a consequence, the :attr:`~django.db.models.DB_CASCADE` option does
not trigger the ``pre_delete`` or ``post_delete`` signals.
Minor features
--------------
@ -265,6 +325,8 @@ Miscellaneous
* :class:`~django.contrib.contenttypes.fields.GenericForeignKey` now uses a
separate descriptor class: the private ``GenericForeignKeyDescriptor``.
* The minimum supported version of SQLite is increased from 3.31.0 to 3.37.0.
.. _deprecated-features-6.1:
Features deprecated in 6.1

View file

@ -535,6 +535,7 @@ unencrypted
unescape
unescaped
unevaluated
unfetched
unglamorous
ungrouped
unhandled

View file

@ -0,0 +1,143 @@
===========
Fetch modes
===========
.. versionadded:: 6.1
.. module:: django.db.models.fetch_modes
.. currentmodule:: django.db.models
When accessing model fields that were not loaded as part of the original query,
Django will fetch that field's data from the database. You can customize the
behavior of this fetching with a **fetch mode**, making it more efficient or
even blocking it.
Use :meth:`.QuerySet.fetch_mode` to set the fetch mode for model
instances fetched by a ``QuerySet``:
.. code-block:: python
from django.db import models
books = Book.objects.fetch_mode(models.FETCH_PEERS)
Fetch modes apply to:
* :class:`~django.db.models.ForeignKey` fields
* :class:`~django.db.models.OneToOneField` fields and their reverse accessors
* Fields deferred with :meth:`.QuerySet.defer` or :meth:`.QuerySet.only`
* :ref:`generic-relations`
Django copies the fetch mode of an instance to any related objects it fetches,
so the mode applies to a whole tree of relationships, not just the top-level
model in the initial ``QuerySet``. This copying is also done in related
managers, even though fetch modes don't affect such managers' queries.
Available modes
===============
.. admonition:: Referencing fetch modes
Fetch modes are defined in ``django.db.models.fetch_modes``, but for
convenience they're imported into :mod:`django.db.models`. The standard
convention is to use ``from django.db import models`` and refer to the
fetch modes as ``models.<mode>``.
Django provides three fetch modes. We'll explain them below using these models:
.. code-block:: python
from django.db import models
class Author(models.Model): ...
class Book(models.Model):
author = models.ForeignKey(Author, on_delete=models.CASCADE)
...
…and this loop:
.. code-block:: python
for book in books:
print(book.author.name)
…where ``books`` is a ``QuerySet`` of ``Book`` instances using some fetch mode.
.. attribute:: FETCH_ONE
Fetches the missing field for the current instance only. This is the default
mode.
Using ``FETCH_ONE`` for the above example would use:
* 1 query to fetch ``books``
* N queries, where N is the number of books, to fetch the missing ``author``
field
…for a total of 1+N queries. This query pattern is known as the "N+1 queries
problem" because it often leads to performance issues when N is large.
.. attribute:: FETCH_PEERS
Fetches the missing field for the current instance and its "peers"—instances
that came from the same initial ``QuerySet``. The behavior of this mode is
based on the assumption that if you need a field for one instance, you probably
need it for all instances in the same batch, since you'll likely process them
all identically.
Using ``FETCH_PEERS`` for the above example would use:
* 1 query to fetch ``books``
* 1 query to fetch all missing ``author`` fields for the batch of books
…for a total of 2 queries. The batch query makes this mode a lot more efficient
than ``FETCH_ONE`` and is similar to an on-demand call to
:meth:`.QuerySet.prefetch_related` or
:func:`~django.db.models.prefetch_related_objects`. Using ``FETCH_PEERS`` can
reduce most cases of the "N+1 queries problem" to two queries without
much effort.
The "peer" instances are tracked in a list of weak references, to avoid
memory leaks where some peer instances are discarded.
.. attribute:: RAISE
Raises a :exc:`~django.core.exceptions.FieldFetchBlocked` exception.
Using ``RAISE`` for the above example would raise an exception at the access of
``book.author``, like:
.. code-block:: python
FieldFetchBlocked("Fetching of Book.author blocked.")
This mode can prevent unintentional queries in performance-critical
sections of code.
.. _fetch-modes-custom-manager:
Make a fetch mode the default for a model class
===============================================
Set the default fetch mode for a model class with a
:ref:`custom manager <custom-managers>` that overrides ``get_queryset()``:
.. code-block:: python
from django.db import models
class BookManager(models.Manager):
def get_queryset(self):
return super().get_queryset().fetch_mode(models.FETCH_PEERS)
class Book(models.Model):
title = models.TextField()
author = models.ForeignKey("Author", on_delete=models.CASCADE)
objects = BookManager()

View file

@ -13,6 +13,7 @@ Generally, each model maps to a single database table.
models
queries
fetch-modes
aggregation
search
managers

View file

@ -196,28 +196,47 @@ thousands of records are returned. The penalty will be compounded if the
database lives on a separate server, where network overhead and latency also
play a factor.
Retrieve everything at once if you know you will need it
========================================================
Retrieve related objects efficiently
====================================
Hitting the database multiple times for different parts of a single 'set' of
data that you will need all parts of is, in general, less efficient than
retrieving it all in one query. This is particularly important if you have a
query that is executed in a loop, and could therefore end up doing many
database queries, when only one was needed. So:
Generally, accessing the database multiple times to retrieve different parts
of a single "set" of data is less efficient than retrieving it all in one
query. This is particularly important if you have a query that is executed in a
loop, and could therefore end up doing many database queries, when only one
is needed. Below are some techniques to combine queries for efficiency.
Use the ``FETCH_PEERS`` fetch mode
----------------------------------
Use the :attr:`~django.db.models.FETCH_PEERS` fetch mode to make on-demand
field access more efficient with bulk-fetching. Enable it for all usage of
your models :ref:`with a custom manager <fetch-modes-custom-manager>`.
Using this fetch mode is easier than declaring fields to fetch with
:meth:`~django.db.models.query.QuerySet.select_related` or
:meth:`~django.db.models.query.QuerySet.prefetch_related`, especially when it's
hard to predict which fields will be accessed.
Use ``QuerySet.select_related()`` and ``prefetch_related()``
------------------------------------------------------------
Understand :meth:`~django.db.models.query.QuerySet.select_related` and
:meth:`~django.db.models.query.QuerySet.prefetch_related` thoroughly, and use
them:
When the :attr:`~django.db.models.FETCH_PEERS` fetch mode is not appropriate or
efficient enough, use :meth:`~django.db.models.query.QuerySet.select_related`
and :meth:`~django.db.models.query.QuerySet.prefetch_related`. Understand their
documentation thoroughly and apply them where needed.
* in :doc:`managers and default managers </topics/db/managers>` where
appropriate. Be aware when your manager is and is not used; sometimes this is
tricky so don't make assumptions.
It may be useful to apply these methods in :doc:`managers and default managers
</topics/db/managers>`. Be aware when your manager is and is not used; for
example, related object access :ref:`uses the base manager
<managers-for-related-objects>` rather than the default manager.
* in view code or other layers, possibly making use of
:func:`~django.db.models.prefetch_related_objects` where needed.
Use ``prefetch_related_objects()``
----------------------------------
Where :attr:`~django.db.models.query.QuerySet.prefetch_related` would be useful
after the queryset has been evaluated, use
:func:`~django.db.models.prefetch_related_objects` to execute an extra
prefetch.
Don't retrieve things you don't need
====================================

View file

@ -1683,15 +1683,15 @@ a join with an ``F()`` object, a ``FieldError`` will be raised:
Related objects
===============
When you define a relationship in a model (i.e., a
When you define a relationship in a model (with
:class:`~django.db.models.ForeignKey`,
:class:`~django.db.models.OneToOneField`, or
:class:`~django.db.models.ManyToManyField`), instances of that model will have
a convenient API to access the related object(s).
:class:`~django.db.models.ManyToManyField`), instances of the model class gain
accessor attributes for the related object(s).
Using the models at the top of this page, for example, an ``Entry`` object
``e`` can get its associated ``Blog`` object by accessing the ``blog``
attribute: ``e.blog``.
``e`` has its associated ``Blog`` object accessible in its ``blog`` attribute:
``e.blog``.
(Behind the scenes, this functionality is implemented by Python
:doc:`descriptors <python:howto/descriptor>`. This shouldn't really matter to
@ -1699,8 +1699,14 @@ you, but we point it out here for the curious.)
Django also creates API accessors for the "other" side of the relationship --
the link from the related model to the model that defines the relationship.
For example, a ``Blog`` object ``b`` has access to a list of all related
``Entry`` objects via the ``entry_set`` attribute: ``b.entry_set.all()``.
For example, a ``Blog`` object ``b`` has a manager that returns all related
``Entry`` objects in the ``entry_set`` attribute: ``b.entry_set.all()``.
These accessors may be prefetched by the ``QuerySet`` methods
:meth:`~django.db.models.query.QuerySet.select_related` or
:meth:`~django.db.models.query.QuerySet.prefetch_related`. If not prefetched,
access will trigger an on-demand fetch through the model's
:doc:`fetch mode </topics/db/fetch-modes>`.
All examples in this section use the sample ``Blog``, ``Author`` and ``Entry``
models defined at the top of this page.

View file

@ -29,6 +29,7 @@ classifiers = [
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI",

View file

@ -133,5 +133,5 @@ class TestUtils(AdminDocsSimpleTestCase):
)
source = "reST, `interpreted text`, default role."
markup = "<p>reST, <cite>interpreted text</cite>, default role.</p>\n"
parts = docutils.core.publish_parts(source=source, writer_name="html4css1")
parts = docutils.core.publish_parts(source=source, writer="html4css1")
self.assertEqual(parts["fragment"], markup)

View file

@ -40,7 +40,7 @@ class ArticleProxy(Article):
proxy = True
class Count(models.Model):
class Cascade(models.Model):
num = models.PositiveSmallIntegerField()
parent = models.ForeignKey("self", models.CASCADE, null=True)
@ -48,6 +48,14 @@ class Count(models.Model):
return str(self.num)
class DBCascade(models.Model):
    # Mirrors the Cascade model but uses the database-level DB_CASCADE
    # delete option, so deletions cascade via the SQL ON DELETE clause
    # rather than in Python.
    num = models.PositiveSmallIntegerField()
    # Self-referential FK; null=True permits root rows with no parent.
    parent = models.ForeignKey("self", models.DB_CASCADE, null=True)

    def __str__(self):
        return str(self.num)
class Event(models.Model):
date = models.DateTimeField(auto_now_add=True)

View file

@ -26,7 +26,17 @@ from django.test.utils import isolate_apps
from django.utils.formats import localize
from django.utils.safestring import mark_safe
from .models import Article, Car, Count, Event, EventGuide, Location, Site, Vehicle
from .models import (
Article,
Car,
Cascade,
DBCascade,
Event,
EventGuide,
Location,
Site,
Vehicle,
)
class NestedObjectsTests(TestCase):
@ -34,10 +44,12 @@ class NestedObjectsTests(TestCase):
Tests for ``NestedObject`` utility collection.
"""
cascade_model = Cascade
@classmethod
def setUpTestData(cls):
cls.n = NestedObjects(using=DEFAULT_DB_ALIAS)
cls.objs = [Count.objects.create(num=i) for i in range(5)]
cls.objs = [cls.cascade_model.objects.create(num=i) for i in range(5)]
def _check(self, target):
self.assertEqual(self.n.nested(lambda obj: obj.num), target)
@ -103,6 +115,15 @@ class NestedObjectsTests(TestCase):
n.collect([Vehicle.objects.first()])
class DBNestedObjectsTests(NestedObjectsTests):
    """
    Exercise NestedObjectsTests but with a model that makes use of DB_CASCADE
    instead of CASCADE to ensure proper collection of objects takes place.
    """

    # Hook attribute consumed by NestedObjectsTests.setUpTestData().
    cascade_model = DBCascade
class UtilsTests(SimpleTestCase):
empty_value = "-empty-"

View file

@ -109,9 +109,9 @@ class Tests(TestCase):
connections["default"].close()
self.assertTrue(os.path.isfile(os.path.join(tmp, "test.db")))
@mock.patch.object(connection, "get_database_version", return_value=(3, 30))
@mock.patch.object(connection, "get_database_version", return_value=(3, 36))
def test_check_database_version_supported(self, mocked_get_database_version):
msg = "SQLite 3.31 or later is required (found 3.30)."
msg = "SQLite 3.37 or later is required (found 3.36)."
with self.assertRaisesMessage(NotSupportedError, msg):
connection.check_database_version_supported()
self.assertTrue(mocked_get_database_version.called)

View file

@ -290,6 +290,13 @@ class ModelTest(TestCase):
)
self.assertEqual(Article.objects.get(headline="Article 10"), a10)
def test_create_method_propagates_fetch_mode(self):
    # create() on a fetch_mode() queryset must stamp that mode onto the
    # new instance's _state.
    mode = models.FETCH_PEERS
    queryset = Article.objects.fetch_mode(mode)
    article = queryset.create(
        headline="Article 10",
        pub_date=datetime(2005, 7, 31, 12, 30, 45),
    )
    self.assertEqual(article._state.fetch_mode, mode)
def test_year_lookup_edge_case(self):
# Edge-case test: A year lookup should retrieve all objects in
# the given year, including Jan. 1 and Dec. 31.
@ -807,6 +814,7 @@ class ManagerTest(SimpleTestCase):
"alatest",
"aupdate",
"aupdate_or_create",
"fetch_mode",
]
def test_manager_methods(self):

View file

@ -141,3 +141,23 @@ class CompositePKAggregateTests(TestCase):
msg = "Max expression does not support composite primary keys."
with self.assertRaisesMessage(ValueError, msg):
Comment.objects.aggregate(Max("pk"))
def test_first_from_unordered_queryset_aggregation_pk_selected(self):
    # first() on an unordered aggregated queryset works when the whole
    # composite pk is selected.
    queryset = Comment.objects.values("pk").annotate(max=Max("id"))
    expected = {"pk": (self.comment_1.tenant_id, 1), "max": 1}
    self.assertEqual(queryset.first(), expected)
def test_first_from_unordered_queryset_aggregation_pk_selected_separately(self):
    # Selecting each component of the composite pk individually also
    # allows first() on the unordered aggregated queryset.
    queryset = Comment.objects.values("tenant", "id").annotate(max=Max("id"))
    expected = {"tenant": self.comment_1.tenant_id, "id": 1, "max": 1}
    self.assertEqual(queryset.first(), expected)
def test_first_from_unordered_queryset_aggregation_pk_incomplete(self):
    # With only part of the composite pk selected, first() must refuse to
    # run on an unordered aggregated queryset.
    queryset = Comment.objects.values("tenant").annotate(max=Max("id"))
    msg = (
        "Cannot use QuerySet.first() on an unordered queryset performing "
        "aggregation. Add an ordering with order_by()."
    )
    with self.assertRaisesMessage(TypeError, msg):
        queryset.first()

View file

@ -80,6 +80,27 @@ class GenericForeignKeyTests(SimpleTestCase):
],
)
def test_content_type_db_on_delete(self):
    # A GenericForeignKey's content_type FK may not use a database-level
    # on_delete variant (DB_CASCADE here); the field check must report
    # contenttypes.E006.
    class Model(models.Model):
        content_type = models.ForeignKey(ContentType, models.DB_CASCADE)
        object_id = models.PositiveIntegerField()
        content_object = GenericForeignKey("content_type", "object_id")

    field = Model._meta.get_field("content_object")
    self.assertEqual(
        field.check(),
        [
            checks.Error(
                "'Model.content_type' cannot use the database-level on_delete "
                "variant.",
                hint="Change the on_delete rule to the non-database variant.",
                obj=field,
                id="contenttypes.E006",
            )
        ],
    )
def test_missing_object_id_field(self):
class TaggedItem(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)

View file

@ -8,7 +8,7 @@ from django.contrib.sites.shortcuts import get_current_site
from django.http import Http404, HttpRequest
from django.test import TestCase, override_settings
from .models import (
from .models import ( # isort:skip
Article,
Author,
FooWithBrokenAbsoluteUrl,
@ -17,9 +17,9 @@ from .models import (
ModelWithM2MToSite,
ModelWithNullFKToSite,
SchemeIncludedURL,
Site as MockSite,
UUIDModel,
)
from .models import Site as MockSite
from .models import UUIDModel
@override_settings(ROOT_URLCONF="contenttypes_tests.urls")

View file

@ -1,4 +1,5 @@
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.core.exceptions import FieldDoesNotExist, FieldError, FieldFetchBlocked
from django.db.models import FETCH_PEERS, RAISE
from django.test import SimpleTestCase, TestCase
from .models import (
@ -29,6 +30,7 @@ class DeferTests(AssertionMixin, TestCase):
def setUpTestData(cls):
cls.s1 = Secondary.objects.create(first="x1", second="y1")
cls.p1 = Primary.objects.create(name="p1", value="xx", related=cls.s1)
cls.p2 = Primary.objects.create(name="p2", value="yy", related=cls.s1)
def test_defer(self):
qs = Primary.objects.all()
@ -141,7 +143,6 @@ class DeferTests(AssertionMixin, TestCase):
def test_saving_object_with_deferred_field(self):
# Saving models with deferred fields is possible (but inefficient,
# since every field has to be retrieved first).
Primary.objects.create(name="p2", value="xy", related=self.s1)
obj = Primary.objects.defer("value").get(name="p2")
obj.name = "a new name"
obj.save()
@ -181,10 +182,71 @@ class DeferTests(AssertionMixin, TestCase):
self.assertEqual(obj.name, "adonis")
def test_defer_fk_attname(self):
primary = Primary.objects.defer("related_id").get()
primary = Primary.objects.defer("related_id").get(name="p1")
with self.assertNumQueries(1):
self.assertEqual(primary.related_id, self.p1.related_id)
def test_only_fetch_mode_fetch_peers(self):
    # With FETCH_PEERS, accessing a field omitted by only() on one
    # instance batch-fetches it for all peers from the same queryset.
    p1, p2 = Primary.objects.fetch_mode(FETCH_PEERS).only("name")
    with self.assertNumQueries(1):
        p1.value
    # The peer's value was populated by the batch query above.
    with self.assertNumQueries(0):
        p2.value
def test_only_fetch_mode_fetch_peers_single(self):
    # FETCH_PEERS with a single instance still issues exactly one query
    # for the deferred field.
    p1 = Primary.objects.fetch_mode(FETCH_PEERS).only("name").get(name="p1")
    with self.assertNumQueries(1):
        p1.value
def test_defer_fetch_mode_fetch_peers(self):
    # Same as test_only_fetch_mode_fetch_peers but with defer(): the
    # first access batch-fetches the deferred field for both peers.
    p1, p2 = Primary.objects.fetch_mode(FETCH_PEERS).defer("value")
    with self.assertNumQueries(1):
        p1.value
    with self.assertNumQueries(0):
        p2.value
def test_defer_fetch_mode_fetch_peers_single(self):
    # A lone instance with a defer()red field costs exactly one extra
    # query on access.
    p1 = Primary.objects.fetch_mode(FETCH_PEERS).defer("value").get(name="p1")
    with self.assertNumQueries(1):
        p1.value
def test_only_fetch_mode_raise(self):
    # RAISE mode turns access to a field excluded via only() into
    # FieldFetchBlocked, raised without a chained cause.
    primary = Primary.objects.fetch_mode(RAISE).only("name").get(name="p1")
    expected = "Fetching of Primary.value blocked."
    with self.assertRaisesMessage(FieldFetchBlocked, expected) as ctx:
        primary.value
    exc = ctx.exception
    self.assertIsNone(exc.__cause__)
    self.assertTrue(exc.__suppress_context__)
def test_defer_fetch_mode_raise(self):
    # RAISE mode likewise blocks fetching of a defer()red field.
    primary = Primary.objects.fetch_mode(RAISE).defer("value").get(name="p1")
    expected = "Fetching of Primary.value blocked."
    with self.assertRaisesMessage(FieldFetchBlocked, expected) as ctx:
        primary.value
    exc = ctx.exception
    self.assertIsNone(exc.__cause__)
    self.assertTrue(exc.__suppress_context__)
class DeferOtherDatabaseTests(TestCase):
    # Verify FETCH_PEERS issues its peer-fetch query against the same
    # non-default database alias the instances came from.
    databases = {"other"}

    @classmethod
    def setUpTestData(cls):
        cls.s1 = Secondary.objects.using("other").create(first="x1", second="y1")
        cls.p1 = Primary.objects.using("other").create(
            name="p1", value="xx", related=cls.s1
        )
        cls.p2 = Primary.objects.using("other").create(
            name="p2", value="yy", related=cls.s1
        )

    def test_defer_fetch_mode_fetch_peers(self):
        p1, p2 = Primary.objects.using("other").fetch_mode(FETCH_PEERS).defer("value")
        # First access triggers one batch query on the "other" alias...
        with self.assertNumQueries(1, using="other"):
            p1.value
        # ...which also populates the peer, so no further queries.
        with self.assertNumQueries(0, using="other"):
            p2.value
class BigChildDeferTests(AssertionMixin, TestCase):
@classmethod

View file

@ -41,6 +41,46 @@ class RChildChild(RChild):
pass
class RelatedDbOptionGrandParent(models.Model):
    # Root of the DB_CASCADE chain used by the database-level on_delete tests.
    pass
class RelatedDbOptionParent(models.Model):
    # Deleting the grandparent cascades to this row at the database level.
    p = models.ForeignKey(RelatedDbOptionGrandParent, models.DB_CASCADE, null=True)
class RelatedDbOption(models.Model):
    # Exercises the database-level DB_SET_NULL and DB_CASCADE delete options.
    name = models.CharField(max_length=30)
    db_setnull = models.ForeignKey(
        RelatedDbOptionParent,
        models.DB_SET_NULL,
        null=True,
        related_name="db_setnull_set",
    )
    db_cascade = models.ForeignKey(
        RelatedDbOptionParent, models.DB_CASCADE, related_name="db_cascade_set"
    )
class SetDefaultDbModel(models.Model):
    # Exercises DB_SET_DEFAULT; only valid on backends that support
    # ON DELETE SET DEFAULT (see Meta.required_db_features).
    db_setdefault = models.ForeignKey(
        RelatedDbOptionParent,
        models.DB_SET_DEFAULT,
        db_default=models.Value(1),
        related_name="db_setdefault_set",
    )
    # Nullable variant whose database default is NULL.
    db_setdefault_none = models.ForeignKey(
        RelatedDbOptionParent,
        models.DB_SET_DEFAULT,
        db_default=None,
        null=True,
        related_name="db_setnull_nullable_set",
    )

    class Meta:
        required_db_features = {"supports_on_delete_db_default"}
class A(models.Model):
name = models.CharField(max_length=30)
@ -119,6 +159,15 @@ def create_a(name):
return a
def create_related_db_option(name):
    """
    Create and return a saved RelatedDbOption named ``name`` with freshly
    created parents assigned to its db_setnull and db_cascade relations.
    """
    obj = RelatedDbOption(name=name)
    # Use a distinct loop variable: the original reused ``name``, which
    # shadowed the function parameter after its only use.
    for field_name in ["db_setnull", "db_cascade"]:
        parent = RelatedDbOptionParent.objects.create()
        setattr(obj, field_name, parent)
    obj.save()
    return obj
class M(models.Model):
m2m = models.ManyToManyField(R, related_name="m_set")
m2m_through = models.ManyToManyField(R, through="MR", related_name="m_through_set")

View file

@ -34,11 +34,16 @@ from .models import (
RChild,
RChildChild,
Referrer,
RelatedDbOption,
RelatedDbOptionGrandParent,
RelatedDbOptionParent,
RProxy,
S,
SetDefaultDbModel,
T,
User,
create_a,
create_related_db_option,
get_default_r,
)
@ -76,18 +81,48 @@ class OnDeleteTests(TestCase):
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setnull)
def test_db_setnull(self):
    # DB_SET_NULL: deleting the related parent nulls out the FK.
    obj = create_related_db_option("db_setnull")
    obj.db_setnull.delete()
    refreshed = RelatedDbOption.objects.get(pk=obj.pk)
    self.assertIsNone(refreshed.db_setnull)
def test_setdefault(self):
    # SET_DEFAULT: deleting the target resets the FK to self.DEFAULT.
    obj = create_a("setdefault")
    obj.setdefault.delete()
    refreshed = A.objects.get(pk=obj.pk)
    self.assertEqual(self.DEFAULT, refreshed.setdefault.pk)
@skipUnlessDBFeature("supports_on_delete_db_default")
def test_db_setdefault(self):
    # Object cannot be created on the module initialization, use hardcoded
    # PKs instead.
    # pk=1 matches db_default=Value(1) on SetDefaultDbModel.db_setdefault.
    r = RelatedDbOptionParent.objects.create(pk=2)
    default_r = RelatedDbOptionParent.objects.create(pk=1)
    set_default_db_obj = SetDefaultDbModel.objects.create(db_setdefault=r)
    # Deleting the referenced row makes the database reset the FK to its
    # default (pk=1) via ON DELETE SET DEFAULT.
    set_default_db_obj.db_setdefault.delete()
    set_default_db_obj = SetDefaultDbModel.objects.get(pk=set_default_db_obj.pk)
    self.assertEqual(set_default_db_obj.db_setdefault, default_r)
def test_setdefault_none(self):
    # SET_DEFAULT with a None default leaves the FK null after deletion.
    obj = create_a("setdefault_none")
    obj.setdefault_none.delete()
    refreshed = A.objects.get(pk=obj.pk)
    self.assertIsNone(refreshed.setdefault_none)
@skipUnlessDBFeature("supports_on_delete_db_default")
def test_db_setdefault_none(self):
    # Object cannot be created on the module initialization, use hardcoded
    # PKs instead.
    r = RelatedDbOptionParent.objects.create(pk=2)
    default_r = RelatedDbOptionParent.objects.create(pk=1)
    set_default_db_obj = SetDefaultDbModel.objects.create(
        db_setdefault_none=r, db_setdefault=default_r
    )
    # db_setdefault_none has db_default=None, so the database resets the
    # FK to NULL when the referenced row is deleted.
    set_default_db_obj.db_setdefault_none.delete()
    set_default_db_obj = SetDefaultDbModel.objects.get(pk=set_default_db_obj.pk)
    self.assertIsNone(set_default_db_obj.db_setdefault_none)
def test_cascade(self):
a = create_a("cascade")
a.cascade.delete()
@ -359,6 +394,22 @@ class DeletionTests(TestCase):
self.assertNumQueries(5, s.delete)
self.assertFalse(S.objects.exists())
def test_db_cascade(self):
    # DB_CASCADE happens entirely in the database: one DELETE query
    # removes the parent and all 2 * GET_ITERATOR_CHUNK_SIZE children,
    # and the returned counts include only the row Django itself deleted.
    related_db_op = RelatedDbOptionParent.objects.create(
        p=RelatedDbOptionGrandParent.objects.create()
    )
    RelatedDbOption.objects.bulk_create(
        [
            RelatedDbOption(db_cascade=related_db_op)
            for _ in range(2 * GET_ITERATOR_CHUNK_SIZE)
        ]
    )
    with self.assertNumQueries(1):
        results = related_db_op.delete()
    self.assertEqual(results, (1, {"delete.RelatedDbOptionParent": 1}))
    self.assertFalse(RelatedDbOption.objects.exists())
    self.assertFalse(RelatedDbOptionParent.objects.exists())
def test_instance_update(self):
deleted = []
related_setnull_sets = []

View file

@ -11,6 +11,7 @@ from pathlib import Path
from django.core.files.storage import FileSystemStorage, default_storage
from django.db import models
from django.utils.functional import LazyObject
class CustomValidNameStorage(FileSystemStorage):
@ -37,6 +38,11 @@ class CallableStorage(FileSystemStorage):
return self
class LazyTempStorage(LazyObject):
    # Defers resolution of the wrapped storage until first attribute access.
    def _setup(self):
        self._wrapped = temp_storage
class Storage(models.Model):
def custom_upload_to(self, filename):
return "foo"
@ -82,3 +88,4 @@ class Storage(models.Model):
extended_length = models.FileField(
storage=temp_storage, upload_to="tests", max_length=1024
)
lazy_storage = models.FileField(storage=LazyTempStorage(), upload_to="tests")

View file

@ -29,6 +29,7 @@ from django.test.utils import requires_tz_support
from django.urls import NoReverseMatch, reverse_lazy
from django.utils import timezone
from django.utils._os import symlinks_supported
from django.utils.functional import empty
from .models import (
Storage,
@ -1267,3 +1268,11 @@ class StorageHandlerTests(SimpleTestCase):
)
with self.assertRaisesMessage(InvalidStorageError, msg):
test_storages["invalid_backend"]
class StorageLazyObjectTests(SimpleTestCase):
    def test_lazy_object_is_not_evaluated_before_manual_access(self):
        # FileField must not force evaluation of a LazyObject storage:
        # _wrapped stays the ``empty`` sentinel until explicitly accessed.
        obj = Storage()
        self.assertIs(obj.lazy_storage.storage._wrapped, empty)
        # assertEqual triggers resolution.
        self.assertEqual(obj.lazy_storage.storage, temp_storage)

View file

@ -5,6 +5,7 @@ from operator import attrgetter
from django.core.exceptions import FieldError, ValidationError
from django.db import connection, models
from django.db.models import FETCH_PEERS
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext, isolate_apps
from django.utils import translation
@ -603,6 +604,42 @@ class MultiColumnFKTests(TestCase):
[m4],
)
def test_fetch_mode_copied_forward_fetching_one(self):
    # The queryset's fetch mode is copied to the fetched instance and
    # onward to related objects fetched through it.
    bob = Person.objects.fetch_mode(FETCH_PEERS).get(pk=self.bob.pk)
    self.assertEqual(bob._state.fetch_mode, FETCH_PEERS)
    related_country = bob.person_country
    self.assertEqual(related_country._state.fetch_mode, FETCH_PEERS)
def test_fetch_mode_copied_forward_fetching_many(self):
    # Fetch mode propagates to instances from an evaluated queryset and
    # to their related objects.
    first, *_ = list(Person.objects.fetch_mode(FETCH_PEERS))
    self.assertEqual(first._state.fetch_mode, FETCH_PEERS)
    self.assertEqual(first.person_country._state.fetch_mode, FETCH_PEERS)
def test_fetch_mode_copied_reverse_fetching_one(self):
    # Fetch mode is copied through reverse related managers as well.
    usa = Country.objects.fetch_mode(FETCH_PEERS).get(pk=self.usa.pk)
    self.assertEqual(usa._state.fetch_mode, FETCH_PEERS)
    bob = usa.person_set.get(pk=self.bob.pk)
    self.assertEqual(bob._state.fetch_mode, FETCH_PEERS)
def test_fetch_mode_copied_reverse_fetching_many(self):
    # Same as the single-object case, but via an evaluated queryset and
    # a related-manager query.
    first_country = list(Country.objects.fetch_mode(FETCH_PEERS))[0]
    self.assertEqual(first_country._state.fetch_mode, FETCH_PEERS)
    earliest_person = first_country.person_set.earliest("pk")
    self.assertEqual(earliest_person._state.fetch_mode, FETCH_PEERS)
class TestModelCheckTests(SimpleTestCase):
@isolate_apps("foreign_object")

View file

@ -1,7 +1,8 @@
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.prefetch import GenericPrefetch
from django.core.exceptions import FieldError
from django.core.exceptions import FieldError, FieldFetchBlocked
from django.db.models import Q, prefetch_related_objects
from django.db.models.fetch_modes import FETCH_PEERS, RAISE
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from .models import (
@ -780,6 +781,76 @@ class GenericRelationsTests(TestCase):
self.platypus.latin_name,
)
def test_fetch_mode_fetch_peers(self):
    # FETCH_PEERS on a GenericForeignKey batch-fetches targets for all
    # peers, grouped by content type.
    TaggedItem.objects.bulk_create(
        [
            TaggedItem(tag="lion", content_object=self.lion),
            TaggedItem(tag="platypus", content_object=self.platypus),
            TaggedItem(tag="quartz", content_object=self.quartz),
        ]
    )
    # Peers fetching should fetch all related peers GFKs at once which is
    # one query per content type.
    with self.assertNumQueries(1):
        quartz_tag, platypus_tag, lion_tag = TaggedItem.objects.fetch_mode(
            FETCH_PEERS
        ).order_by("-pk")[:3]
    # Two queries — presumably one per distinct content type among the
    # three peers (TODO confirm against the fixture models).
    with self.assertNumQueries(2):
        self.assertEqual(lion_tag.content_object, self.lion)
    # The batch above populated the remaining peers' caches.
    with self.assertNumQueries(0):
        self.assertEqual(platypus_tag.content_object, self.platypus)
        self.assertEqual(quartz_tag.content_object, self.quartz)
    # It should ignore already cached instances though.
    with self.assertNumQueries(1):
        quartz_tag, platypus_tag, lion_tag = TaggedItem.objects.fetch_mode(
            FETCH_PEERS
        ).order_by("-pk")[:3]
    with self.assertNumQueries(2):
        self.assertEqual(quartz_tag.content_object, self.quartz)
        self.assertEqual(lion_tag.content_object, self.lion)
    with self.assertNumQueries(0):
        self.assertEqual(platypus_tag.content_object, self.platypus)
        self.assertEqual(quartz_tag.content_object, self.quartz)
def test_fetch_mode_raise(self):
    # RAISE mode blocks on-demand GFK fetching with FieldFetchBlocked.
    tag = TaggedItem.objects.fetch_mode(RAISE).get(tag="yellow")
    expected = "Fetching of TaggedItem.content_object blocked."
    with self.assertRaisesMessage(FieldFetchBlocked, expected) as ctx:
        tag.content_object
    exc = ctx.exception
    self.assertIsNone(exc.__cause__)
    self.assertTrue(exc.__suppress_context__)
def test_fetch_mode_copied_forward_fetching_one(self):
    # Fetch mode propagates from the tag to the GFK target it fetches.
    tag = TaggedItem.objects.fetch_mode(FETCH_PEERS).get(tag="yellow")
    target = tag.content_object
    self.assertEqual(target, self.lion)
    self.assertEqual(target._state.fetch_mode, FETCH_PEERS)
def test_fetch_mode_copied_forward_fetching_many(self):
tags = list(TaggedItem.objects.fetch_mode(FETCH_PEERS).order_by("tag"))
tag = [t for t in tags if t.tag == "yellow"][0]
self.assertEqual(tag.content_object, self.lion)
self.assertEqual(
tag.content_object._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_one(self):
animal = Animal.objects.fetch_mode(FETCH_PEERS).get(pk=self.lion.pk)
self.assertEqual(animal._state.fetch_mode, FETCH_PEERS)
tag = animal.tags.get(tag="yellow")
self.assertEqual(tag._state.fetch_mode, FETCH_PEERS)
def test_fetch_mode_copied_reverse_fetching_many(self):
animals = list(Animal.objects.fetch_mode(FETCH_PEERS))
animal = animals[0]
self.assertEqual(animal._state.fetch_mode, FETCH_PEERS)
tags = list(animal.tags.all())
tag = tags[0]
self.assertEqual(tag._state.fetch_mode, FETCH_PEERS)
class ProxyRelatedModelTest(TestCase):
def test_default_behavior(self):

View file

@ -14,6 +14,7 @@ from django.contrib.gis.geos import (
Polygon,
fromstr,
)
from django.contrib.gis.geos.libgeos import geos_version_tuple
from django.contrib.gis.measure import Area
from django.db import NotSupportedError, connection
from django.db.models import IntegerField, Sum, Value
@ -916,39 +917,46 @@ class GISFunctionsTests(FuncTestMixin, TestCase):
@skipUnlessDBFeature("has_GeometryType_function")
def test_geometry_type(self):
Feature.objects.bulk_create(
[
Feature(name="Point", geom=Point(0, 0)),
Feature(name="LineString", geom=LineString((0, 0), (1, 1))),
Feature(name="Polygon", geom=Polygon(((0, 0), (1, 0), (1, 1), (0, 0)))),
Feature(name="MultiPoint", geom=MultiPoint(Point(0, 0), Point(1, 1))),
Feature(
name="MultiLineString",
geom=MultiLineString(
LineString((0, 0), (1, 1)), LineString((1, 1), (2, 2))
),
test_features = [
Feature(name="Point", geom=Point(0, 0)),
Feature(name="LineString", geom=LineString((0, 0), (1, 1))),
Feature(name="Polygon", geom=Polygon(((0, 0), (1, 0), (1, 1), (0, 0)))),
Feature(
name="MultiLineString",
geom=MultiLineString(
LineString((0, 0), (1, 1)), LineString((1, 1), (2, 2))
),
Feature(
name="MultiPolygon",
geom=MultiPolygon(
Polygon(((0, 0), (1, 0), (1, 1), (0, 0))),
Polygon(((1, 1), (2, 1), (2, 2), (1, 1))),
),
),
Feature(
name="MultiPolygon",
geom=MultiPolygon(
Polygon(((0, 0), (1, 0), (1, 1), (0, 0))),
Polygon(((1, 1), (2, 1), (2, 2), (1, 1))),
),
]
)
expected_results = {
),
]
expected_results = [
("POINT", Point),
("LINESTRING", LineString),
("POLYGON", Polygon),
("MULTIPOINT", MultiPoint),
("MULTILINESTRING", MultiLineString),
("MULTIPOLYGON", MultiPolygon),
}
for geom_type, geom_class in expected_results:
with self.subTest(geom_type=geom_type):
]
# GEOSWKTWriter_write() behavior was changed in GEOS 3.12+ to include
# parentheses for sub-members. MariaDB doesn't accept WKT
# representations with additional parentheses for MultiPoint. This is
# an accepted bug (MDEV-36166) in MariaDB that should be fixed in the
# future.
if not connection.ops.mariadb or geos_version_tuple() < (3, 12):
test_features.append(
Feature(name="MultiPoint", geom=MultiPoint(Point(0, 0), Point(1, 1)))
)
expected_results.append(("MULTIPOINT", MultiPoint))
for test_feature, (geom_type, geom_class) in zip(
test_features, expected_results, strict=True
):
with self.subTest(geom_type=geom_type, geom=test_feature.geom.wkt):
test_feature.save()
obj = (
Feature.objects.annotate(
geometry_type=functions.GeometryType("geom")

View file

@ -41,10 +41,7 @@ class GEOSIOTest(SimpleTestCase):
def test02_wktwriter(self):
# Creating a WKTWriter instance, testing its ptr property.
wkt_w = WKTWriter()
msg = (
"Incompatible pointer type: "
"<class 'django.contrib.gis.geos.prototypes.io.LP_WKTReader_st'>."
)
msg = "Incompatible pointer type: "
with self.assertRaisesMessage(TypeError, msg):
wkt_w.ptr = WKTReader.ptr_type()

View file

@ -100,10 +100,15 @@ class RelatedGeoModelTest(TestCase):
self.assertEqual(type(u3), MultiPoint)
# Ordering of points in the result of the union is not defined and
# implementation-dependent (DB backend, GEOS version)
self.assertEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u1})
self.assertEqual({p.ewkt for p in ref_u2}, {p.ewkt for p in u2})
self.assertEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u3})
# implementation-dependent (DB backend, GEOS version).
tests = [
(u1, ref_u1),
(u2, ref_u2),
(u3, ref_u1),
]
for union, ref in tests:
for point, ref_point in zip(sorted(union), sorted(ref), strict=True):
self.assertIs(point.equals_exact(ref_point, tolerance=6), True)
def test05_select_related_fk_to_subclass(self):
"""

View file

@ -3062,3 +3062,64 @@ class ConstraintsTests(TestCase):
),
],
)
@isolate_apps("invalid_models_tests")
class RelatedFieldTests(SimpleTestCase):
    def test_on_delete_python_db_variants(self):
        """Mixing DB-level and Python-level on_delete on one model is E050."""
        class Artist(models.Model):
            pass
        class Album(models.Model):
            artist = models.ForeignKey(Artist, models.CASCADE)
        class Song(models.Model):
            # RESTRICT is Python-level; DB_CASCADE is database-level — the
            # combination on the same model triggers the check below.
            album = models.ForeignKey(Album, models.RESTRICT)
            artist = models.ForeignKey(Artist, models.DB_CASCADE)
        self.assertEqual(
            Song.check(databases=self.databases),
            [
                Error(
                    "The model cannot have related fields with both database-level and "
                    "Python-level on_delete variants.",
                    obj=Song,
                    id="models.E050",
                ),
            ],
        )
def test_on_delete_python_db_variants_auto_created(self):
class SharedModel(models.Model):
pass
class Parent(models.Model):
pass
class Child(SharedModel):
parent = models.ForeignKey(Parent, on_delete=models.DB_CASCADE)
self.assertEqual(
Child.check(databases=self.databases),
[
Error(
"The model cannot have related fields with both database-level and "
"Python-level on_delete variants.",
obj=Child,
id="models.E050",
),
],
)
    def test_on_delete_db_do_nothing(self):
        """DO_NOTHING may coexist with DB_CASCADE without raising models.E050."""
        class Artist(models.Model):
            pass
        class Album(models.Model):
            artist = models.ForeignKey(Artist, models.CASCADE)
        class Song(models.Model):
            # DO_NOTHING alongside DB_CASCADE produces no check errors.
            album = models.ForeignKey(Album, models.DO_NOTHING)
            artist = models.ForeignKey(Artist, models.DB_CASCADE)
        self.assertEqual(Song.check(databases=self.databases), [])

View file

@ -3,7 +3,8 @@ from unittest import mock
from django.core.checks import Error
from django.core.checks import Warning as DjangoWarning
from django.db import connection, models
from django.test.testcases import SimpleTestCase
from django.test import skipUnlessDBFeature
from django.test.testcases import SimpleTestCase, TestCase
from django.test.utils import isolate_apps, modify_settings, override_settings
@ -751,6 +752,29 @@ class RelativeFieldTests(SimpleTestCase):
],
)
    def test_on_delete_db_set_null_on_non_nullable_field(self):
        """DB_SET_NULL on a non-nullable FK raises fields.E320."""
        class Person(models.Model):
            pass
        class Model(models.Model):
            # null defaults to False, which conflicts with DB_SET_NULL.
            foreign_key = models.ForeignKey("Person", models.DB_SET_NULL)
        field = Model._meta.get_field("foreign_key")
        self.assertEqual(
            field.check(),
            [
                Error(
                    "Field specifies on_delete=DB_SET_NULL, but cannot be null.",
                    hint=(
                        "Set null=True argument on the field, or change the on_delete "
                        "rule."
                    ),
                    obj=field,
                    id="fields.E320",
                ),
            ],
        )
def test_on_delete_set_default_without_default_value(self):
class Person(models.Model):
pass
@ -2259,3 +2283,175 @@ class M2mThroughFieldsTests(SimpleTestCase):
),
],
)
@isolate_apps("invalid_models_tests")
class DatabaseLevelOnDeleteTests(TestCase):
def test_db_set_default_support(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(
Parent, models.DB_SET_DEFAULT, db_default=models.Value(1)
)
field = Child._meta.get_field("parent")
expected = (
[]
if connection.features.supports_on_delete_db_default
else [
Error(
f"{connection.display_name} does not support a DB_SET_DEFAULT.",
hint="Change the on_delete rule to SET_DEFAULT.",
obj=field,
id="fields.E324",
)
]
)
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_set_default_required_db_features(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(
Parent, models.DB_SET_DEFAULT, db_default=models.Value(1)
)
class Meta:
required_db_features = {"supports_on_delete_db_default"}
field = Child._meta.get_field("parent")
self.assertEqual(field.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_on_delete_db_default")
def test_db_set_default_no_db_default(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.DB_SET_DEFAULT)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(databases=self.databases),
[
Error(
"Field specifies on_delete=DB_SET_DEFAULT, but has no db_default "
"value.",
hint="Set a db_default value, or change the on_delete rule.",
obj=field,
id="fields.E322",
)
],
)
def test_python_db_chain(self):
class GrandParent(models.Model):
pass
class Parent(models.Model):
grand_parent = models.ForeignKey(GrandParent, models.DB_CASCADE)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.RESTRICT)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(databases=self.databases),
[
Error(
"Field specifies Python-level on_delete variant, but referenced "
"model uses database-level variant.",
hint=(
"Use either database or Python on_delete variants uniformly in "
"the references chain."
),
obj=field,
id="fields.E323",
)
],
)
def test_db_python_chain(self):
class GrandParent(models.Model):
pass
class Parent(models.Model):
grand_parent = models.ForeignKey(GrandParent, models.CASCADE)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.DB_SET_NULL, null=True)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(databases=self.databases),
[
Error(
"Field specifies database-level on_delete variant, but referenced "
"model uses Python-level variant.",
hint=(
"Use either database or Python on_delete variants uniformly in "
"the references chain."
),
obj=field,
id="fields.E323",
)
],
)
def test_db_python_chain_auto_created(self):
class GrandParent(models.Model):
pass
class Parent(GrandParent):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, on_delete=models.DB_CASCADE)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(databases=self.databases),
[
Error(
"Field specifies database-level on_delete variant, but referenced "
"model uses Python-level variant.",
hint=(
"Use either database or Python on_delete variants uniformly in "
"the references chain."
),
obj=field,
id="fields.E323",
)
],
)
def test_db_do_nothing_chain(self):
class GrandParent(models.Model):
pass
class Parent(models.Model):
grand_parent = models.ForeignKey(GrandParent, models.DO_NOTHING)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.DB_SET_NULL, null=True)
field = Child._meta.get_field("parent")
self.assertEqual(field.check(databases=self.databases), [])
def test_do_nothing_db_chain(self):
class GrandParent(models.Model):
pass
class Parent(models.Model):
grand_parent = models.ForeignKey(GrandParent, models.DB_SET_NULL, null=True)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.DO_NOTHING)
field = Child._meta.get_field("parent")
self.assertEqual(field.check(databases=self.databases), [])

View file

@ -258,7 +258,7 @@ class MailTests(MailTestsMixin, SimpleTestCase):
`surrogateescape`.
Following https://github.com/python/cpython/issues/76511, newer
versions of Python (3.12.3 and 3.13) ensure that a message's
versions of Python (3.12.3 and 3.13+) ensure that a message's
payload is encoded with the provided charset and `surrogateescape` is
used as the error handling strategy.

View file

@ -1,6 +1,7 @@
from unittest import mock
from django.db import connection, transaction
from django.db.models import FETCH_PEERS
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import (
@ -589,6 +590,46 @@ class ManyToManyTests(TestCase):
querysets=[Publication.objects.all(), Publication.objects.all()],
)
def test_fetch_mode_copied_forward_fetching_one(self):
a = Article.objects.fetch_mode(FETCH_PEERS).get(pk=self.a1.pk)
self.assertEqual(a._state.fetch_mode, FETCH_PEERS)
p = a.publications.earliest("pk")
self.assertEqual(
p._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_forward_fetching_many(self):
articles = list(Article.objects.fetch_mode(FETCH_PEERS))
a = articles[0]
self.assertEqual(a._state.fetch_mode, FETCH_PEERS)
publications = list(a.publications.all())
p = publications[0]
self.assertEqual(
p._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_one(self):
p1 = Publication.objects.fetch_mode(FETCH_PEERS).get(pk=self.p1.pk)
self.assertEqual(p1._state.fetch_mode, FETCH_PEERS)
a = p1.article_set.earliest("pk")
self.assertEqual(
a._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_many(self):
publications = list(Publication.objects.fetch_mode(FETCH_PEERS))
p = publications[0]
self.assertEqual(p._state.fetch_mode, FETCH_PEERS)
articles = list(p.article_set.all())
a = articles[0]
self.assertEqual(
a._state.fetch_mode,
FETCH_PEERS,
)
class ManyToManyQueryTests(TestCase):
"""

View file

@ -1,8 +1,13 @@
import datetime
from copy import deepcopy
from django.core.exceptions import FieldError, MultipleObjectsReturned
from django.core.exceptions import (
FieldError,
FieldFetchBlocked,
MultipleObjectsReturned,
)
from django.db import IntegrityError, models, transaction
from django.db.models import FETCH_PEERS, RAISE
from django.test import TestCase
from django.utils.translation import gettext_lazy
@ -916,3 +921,72 @@ class ManyToOneTests(TestCase):
instances=countries,
querysets=[City.objects.all(), City.objects.all()],
)
def test_fetch_mode_fetch_peers_forward(self):
Article.objects.create(
headline="This is another test",
pub_date=datetime.date(2005, 7, 27),
reporter=self.r2,
)
a1, a2 = Article.objects.fetch_mode(FETCH_PEERS)
with self.assertNumQueries(1):
a1.reporter
with self.assertNumQueries(0):
a2.reporter
def test_fetch_mode_raise_forward(self):
a = Article.objects.fetch_mode(RAISE).get(pk=self.a.pk)
msg = "Fetching of Article.reporter blocked."
with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm:
a.reporter
self.assertIsNone(cm.exception.__cause__)
self.assertTrue(cm.exception.__suppress_context__)
def test_fetch_mode_copied_forward_fetching_one(self):
a1 = Article.objects.fetch_mode(FETCH_PEERS).get()
self.assertEqual(a1._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
a1.reporter._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_forward_fetching_many(self):
Article.objects.create(
headline="This is another test",
pub_date=datetime.date(2005, 7, 27),
reporter=self.r2,
)
a1, a2 = Article.objects.fetch_mode(FETCH_PEERS)
self.assertEqual(a1._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
a1.reporter._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_one(self):
r1 = Reporter.objects.fetch_mode(FETCH_PEERS).get(pk=self.r.pk)
self.assertEqual(r1._state.fetch_mode, FETCH_PEERS)
article = r1.article_set.get()
self.assertEqual(
article._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_many(self):
Article.objects.create(
headline="This is another test",
pub_date=datetime.date(2005, 7, 27),
reporter=self.r2,
)
r1, r2 = Reporter.objects.fetch_mode(FETCH_PEERS)
self.assertEqual(r1._state.fetch_mode, FETCH_PEERS)
a1 = r1.article_set.get()
self.assertEqual(
a1._state.fetch_mode,
FETCH_PEERS,
)
a2 = r2.article_set.get()
self.assertEqual(
a2._state.fetch_mode,
FETCH_PEERS,
)

View file

@ -971,6 +971,23 @@ class WriterTests(SimpleTestCase):
("('models.Model', {'from django.db import models'})", set()),
)
def test_database_on_delete_serializer_value(self):
db_level_on_delete_options = [
models.DB_CASCADE,
models.DB_SET_DEFAULT,
models.DB_SET_NULL,
]
for option in db_level_on_delete_options:
self.assertSerializedEqual(option)
self.assertSerializedResultEqual(
MigrationWriter.serialize(option),
(
f"('django.db.models.deletion.{option.__name__}', "
"{'import django.db.models.deletion'})",
set(),
),
)
def test_simple_migration(self):
"""
Tests serializing a simple migration.

View file

@ -7,6 +7,7 @@ from operator import attrgetter
from unittest import expectedFailure
from django import forms
from django.db.models import FETCH_PEERS
from django.test import TestCase
from .models import (
@ -600,6 +601,22 @@ class ModelInheritanceTest(TestCase):
self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
    def test_parent_access_copies_fetch_mode(self):
        """Accessing an MTI parent link copies fetch_mode onto the parent."""
        italian_restaurant = ItalianRestaurant.objects.create(
            name="Mom's Spaghetti",
            address="2131 Woodward Ave",
            serves_hot_dogs=False,
            serves_pizza=False,
            serves_gnocchi=True,
        )
        # No queries are made when accessing the parent objects.
        # NOTE(review): nothing here asserts the query count — if that is the
        # intent, the parent access below should be wrapped in
        # assertNumQueries(0); confirm.
        italian_restaurant = ItalianRestaurant.objects.fetch_mode(FETCH_PEERS).get(
            pk=italian_restaurant.pk
        )
        restaurant = italian_restaurant.restaurant_ptr
        self.assertEqual(restaurant._state.fetch_mode, FETCH_PEERS)
def test_id_field_update_on_ancestor_change(self):
place1 = Place.objects.create(name="House of Pasta", address="944 Fullerton")
place2 = Place.objects.create(name="House of Pizza", address="954 Fullerton")

View file

@ -1,4 +1,6 @@
from django.core.exceptions import FieldFetchBlocked
from django.db import IntegrityError, connection, transaction
from django.db.models import FETCH_PEERS, RAISE
from django.test import TestCase
from .models import (
@ -619,3 +621,77 @@ class OneToOneTests(TestCase):
instances=places,
querysets=[Bar.objects.all(), Bar.objects.all()],
)
def test_fetch_mode_fetch_peers_forward(self):
Restaurant.objects.create(
place=self.p2, serves_hot_dogs=True, serves_pizza=False
)
r1, r2 = Restaurant.objects.fetch_mode(FETCH_PEERS)
with self.assertNumQueries(1):
r1.place
with self.assertNumQueries(0):
r2.place
def test_fetch_mode_fetch_peers_reverse(self):
Restaurant.objects.create(
place=self.p2, serves_hot_dogs=True, serves_pizza=False
)
p1, p2 = Place.objects.fetch_mode(FETCH_PEERS)
with self.assertNumQueries(1):
p1.restaurant
with self.assertNumQueries(0):
p2.restaurant
def test_fetch_mode_raise_forward(self):
r = Restaurant.objects.fetch_mode(RAISE).get(pk=self.r1.pk)
msg = "Fetching of Restaurant.place blocked."
with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm:
r.place
self.assertIsNone(cm.exception.__cause__)
self.assertTrue(cm.exception.__suppress_context__)
def test_fetch_mode_raise_reverse(self):
p = Place.objects.fetch_mode(RAISE).get(pk=self.p1.pk)
msg = "Fetching of Place.restaurant blocked."
with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm:
p.restaurant
self.assertIsNone(cm.exception.__cause__)
self.assertTrue(cm.exception.__suppress_context__)
def test_fetch_mode_copied_forward_fetching_one(self):
r1 = Restaurant.objects.fetch_mode(FETCH_PEERS).get(pk=self.r1.pk)
self.assertEqual(r1._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
r1.place._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_forward_fetching_many(self):
Restaurant.objects.create(
place=self.p2, serves_hot_dogs=True, serves_pizza=False
)
r1, r2 = Restaurant.objects.fetch_mode(FETCH_PEERS)
self.assertEqual(r1._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
r1.place._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_one(self):
p1 = Place.objects.fetch_mode(FETCH_PEERS).get(pk=self.p1.pk)
self.assertEqual(p1._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
p1.restaurant._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_reverse_fetching_many(self):
Restaurant.objects.create(
place=self.p2, serves_hot_dogs=True, serves_pizza=False
)
p1, p2 = Place.objects.fetch_mode(FETCH_PEERS)
self.assertEqual(p1._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
p1.restaurant._state.fetch_mode,
FETCH_PEERS,
)

View file

@ -3,7 +3,14 @@ from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import NotSupportedError, connection
from django.db.models import F, Prefetch, QuerySet, prefetch_related_objects
from django.db.models import (
FETCH_PEERS,
F,
Prefetch,
QuerySet,
prefetch_related_objects,
)
from django.db.models.fetch_modes import RAISE
from django.db.models.query import get_prefetcher
from django.db.models.sql import Query
from django.test import (
@ -107,6 +114,32 @@ class PrefetchRelatedTests(TestDataMixin, TestCase):
normal_books = [a.first_book for a in Author.objects.all()]
self.assertEqual(books, normal_books)
def test_fetch_mode_copied_fetching_one(self):
author = (
Author.objects.fetch_mode(FETCH_PEERS)
.prefetch_related("first_book")
.get(pk=self.author1.pk)
)
self.assertEqual(author._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
author.first_book._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_fetching_many(self):
authors = list(
Author.objects.fetch_mode(FETCH_PEERS).prefetch_related("first_book")
)
self.assertEqual(authors[0]._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
authors[0].first_book._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_raise(self):
authors = list(Author.objects.fetch_mode(RAISE).prefetch_related("first_book"))
authors[0].first_book # No exception, already loaded
def test_foreignkey_reverse(self):
with self.assertNumQueries(2):
[

View file

@ -351,6 +351,29 @@ class PickleabilityTestCase(TestCase):
event.edition_set.create()
self.assert_pickles(event.edition_set.order_by("event"))
def test_fetch_mode_fetch_one(self):
restored = pickle.loads(pickle.dumps(self.happening))
self.assertIs(restored._state.fetch_mode, models.FETCH_ONE)
    def test_fetch_mode_fetch_peers(self):
        """Pickling keeps FETCH_PEERS mode but drops the peer references."""
        Happening.objects.create()
        objs = list(Happening.objects.fetch_mode(models.FETCH_PEERS))
        self.assertEqual(objs[0]._state.fetch_mode, models.FETCH_PEERS)
        # Both fetched rows are tracked as peers of each other.
        self.assertEqual(len(objs[0]._state.peers), 2)
        restored = pickle.loads(pickle.dumps(objs))
        self.assertIs(restored[0]._state.fetch_mode, models.FETCH_PEERS)
        # Peers not restored because weak references are not picklable.
        self.assertEqual(restored[0]._state.peers, ())
def test_fetch_mode_raise(self):
objs = list(Happening.objects.fetch_mode(models.RAISE))
self.assertEqual(objs[0]._state.fetch_mode, models.RAISE)
restored = pickle.loads(pickle.dumps(objs))
self.assertIs(restored[0]._state.fetch_mode, models.RAISE)
class InLookupTests(TestCase):
@classmethod

View file

@ -1,7 +1,8 @@
from datetime import date
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist
from django.core.exceptions import FieldDoesNotExist, FieldFetchBlocked
from django.db.models import FETCH_PEERS, RAISE
from django.db.models.query import RawQuerySet
from django.test import TestCase, skipUnlessDBFeature
@ -158,6 +159,22 @@ class RawQueryTests(TestCase):
books = Book.objects.all()
self.assertSuccessfulRawQuery(Book, query, books)
    def test_fk_fetch_mode_peers(self):
        """FETCH_PEERS works through raw(): one query loads all peers' FKs."""
        query = "SELECT * FROM raw_query_book"
        books = list(Book.objects.fetch_mode(FETCH_PEERS).raw(query))
        # Accessing one author triggers a single batched fetch; the second
        # access is served from that batch.
        with self.assertNumQueries(1):
            books[0].author
            books[1].author
def test_fk_fetch_mode_raise(self):
query = "SELECT * FROM raw_query_book"
books = list(Book.objects.fetch_mode(RAISE).raw(query))
msg = "Fetching of Book.author blocked."
with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm:
books[0].author
self.assertIsNone(cm.exception.__cause__)
self.assertTrue(cm.exception.__suppress_context__)
def test_db_column_handler(self):
"""
Test of a simple raw query against a model containing a field with
@ -294,6 +311,23 @@ class RawQueryTests(TestCase):
with self.assertRaisesMessage(FieldDoesNotExist, msg):
list(Author.objects.raw(query))
    def test_missing_fields_fetch_mode_peers(self):
        """Deferred columns omitted from raw() are peer-fetched in one query."""
        # last_name is deliberately left out of the SELECT list.
        query = "SELECT id, first_name, dob FROM raw_query_author"
        authors = list(Author.objects.fetch_mode(FETCH_PEERS).raw(query))
        # One query refreshes the missing field for all peers at once.
        with self.assertNumQueries(1):
            authors[0].last_name
            authors[1].last_name
def test_missing_fields_fetch_mode_raise(self):
query = "SELECT id, first_name, dob FROM raw_query_author"
authors = list(Author.objects.fetch_mode(RAISE).raw(query))
msg = "Fetching of Author.last_name blocked."
with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm:
authors[0].last_name
self.assertIsNone(cm.exception.__cause__)
self.assertTrue(cm.exception.__suppress_context__)
self.assertTrue(cm.exception.__suppress_context__)
def test_annotations(self):
query = (
"SELECT a.*, count(b.id) as book_count "

View file

@ -3,11 +3,11 @@ asgiref >= 3.9.1
argon2-cffi >= 23.1.0
bcrypt >= 4.1.1
black >= 25.9.0
docutils >= 0.19
docutils >= 0.22
geoip2 >= 4.8.0
jinja2 >= 2.11.0
numpy >= 1.26.0
Pillow >= 10.1.0; sys.platform != 'win32' or python_version < '3.14'
Pillow >= 10.1.0
# pylibmc/libmemcached can't be built on Windows.
pylibmc; sys_platform != 'win32'
pymemcache >= 3.4.0

View file

@ -18,6 +18,8 @@ from django.db import (
from django.db.backends.utils import truncate_name
from django.db.models import (
CASCADE,
DB_CASCADE,
DB_SET_NULL,
PROTECT,
AutoField,
BigAutoField,
@ -410,6 +412,40 @@ class SchemaTests(TransactionTestCase):
]
)
@skipUnlessDBFeature("can_create_inline_fk")
def test_inline_fk_db_on_delete(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.create_model(Note)
self.assertForeignKeyNotExists(Note, "book_id", "schema_book")
# Add a foreign key from model to the other.
with (
CaptureQueriesContext(connection) as ctx,
connection.schema_editor() as editor,
):
new_field = ForeignKey(Book, DB_CASCADE)
new_field.set_attributes_from_name("book")
editor.add_field(Note, new_field)
self.assertForeignKeyExists(Note, "book_id", "schema_book")
# Creating a FK field with a constraint uses a single statement without
# a deferred ALTER TABLE.
self.assertFalse(
[
sql
for sql in (str(statement) for statement in editor.deferred_sql)
if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql
]
)
# ON DELETE clause is used.
self.assertTrue(
any(
capture_query["sql"].startswith("ALTER TABLE")
and "ON DELETE" in capture_query["sql"]
for capture_query in ctx.captured_queries
)
)
@skipUnlessDBFeature("can_create_inline_fk")
def test_add_inline_fk_update_data(self):
with connection.schema_editor() as editor:
@ -566,6 +602,63 @@ class SchemaTests(TransactionTestCase):
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_fk_alter_on_delete(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Change CASCADE to DB_CASCADE.
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, DB_CASCADE)
new_field.set_attributes_from_name("author")
with (
connection.schema_editor() as editor,
CaptureQueriesContext(connection) as ctx,
):
editor.alter_field(Book, old_field, new_field)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
self.assertIs(
any("ON DELETE" in query["sql"] for query in ctx.captured_queries), True
)
# Change DB_CASCADE to CASCADE.
old_field = new_field
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with (
connection.schema_editor() as editor,
CaptureQueriesContext(connection) as ctx,
):
editor.alter_field(Book, old_field, new_field)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
self.assertIs(
any("ON DELETE" in query["sql"] for query in ctx.captured_queries), False
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_create_model_db_on_delete(self):
class Parent(Model):
class Meta:
app_label = "schema"
class Child(Model):
parent_fk = ForeignKey(Parent, DB_SET_NULL, null=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Parent)
with CaptureQueriesContext(connection) as ctx:
with connection.schema_editor() as editor:
editor.create_model(Child)
self.assertForeignKeyNotExists(Child, "parent_id", "schema_parent")
self.assertIs(
any("ON DELETE" in query["sql"] for query in ctx.captured_queries), True
)
@isolate_apps("schema")
def test_no_db_constraint_added_during_primary_key_change(self):
"""
@ -4598,6 +4691,7 @@ class SchemaTests(TransactionTestCase):
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
"on_delete_db": "",
}
)
self.assertIn(
@ -4784,7 +4878,7 @@ class SchemaTests(TransactionTestCase):
error_messages={"invalid": "error message"},
help_text="help text",
limit_choices_to={"limit": "choice"},
on_delete=PROTECT,
on_delete=CASCADE,
related_name="related_name",
related_query_name="related_query_name",
validators=[lambda x: x],

View file

@ -1,4 +1,5 @@
from django.core.exceptions import FieldError
from django.db.models import FETCH_PEERS
from django.test import SimpleTestCase, TestCase
from .models import (
@ -210,6 +211,37 @@ class SelectRelatedTests(TestCase):
with self.assertRaisesMessage(TypeError, message):
list(Species.objects.values_list("name").select_related("genus"))
def test_fetch_mode_copied_fetching_one(self):
fly = (
Species.objects.fetch_mode(FETCH_PEERS)
.select_related("genus__family")
.get(name="melanogaster")
)
self.assertEqual(fly._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
fly.genus._state.fetch_mode,
FETCH_PEERS,
)
self.assertEqual(
fly.genus.family._state.fetch_mode,
FETCH_PEERS,
)
def test_fetch_mode_copied_fetching_many(self):
specieses = list(
Species.objects.fetch_mode(FETCH_PEERS).select_related("genus__family")
)
species = specieses[0]
self.assertEqual(species._state.fetch_mode, FETCH_PEERS)
self.assertEqual(
species.genus._state.fetch_mode,
FETCH_PEERS,
)
self.assertEqual(
species.genus.family._state.fetch_mode,
FETCH_PEERS,
)
class SelectRelatedValidationTests(SimpleTestCase):
"""

View file

@ -27,7 +27,7 @@ setenv =
PYTHONDONTWRITEBYTECODE=1
deps =
-e .
py{3,312,313}: -rtests/requirements/py3.txt
py{3,312,313,314}: -rtests/requirements/py3.txt
postgres: -rtests/requirements/postgres.txt
mysql: -rtests/requirements/mysql.txt
oracle: -rtests/requirements/oracle.txt
@ -75,7 +75,7 @@ commands =
[testenv:isort]
basepython = python3
usedevelop = false
deps = isort >= 5.1.0
deps = isort >= 7.0.0
changedir = {toxinidir}
commands = isort --check-only --diff django tests scripts