Mirror of https://github.com/django/django.git
Synced 2025-11-23 12:26:57 +00:00

Commit 7032a5124a: Merge branch 'main' into ticket_36321
108 changed files with 1338 additions and 686 deletions
.github/workflows/check_commit_messages.yml (3 changes, vendored)

@@ -24,8 +24,9 @@ jobs:
           echo "prefix=[$VERSION]" >> $GITHUB_OUTPUT

       - name: Check PR title prefix
+        env:
+          TITLE: ${{ github.event.pull_request.title }}
         run: |
-          TITLE="${{ github.event.pull_request.title }}"
           PREFIX="${{ steps.vars.outputs.prefix }}"
           if [[ "$TITLE" != "$PREFIX"* ]]; then
             echo "❌ PR title must start with the required prefix: $PREFIX"
.github/workflows/schedule_tests.yml (4 changes, vendored)

@@ -98,7 +98,7 @@ jobs:
       - name: Run Selenium tests
         working-directory: ./tests/
         run: |
-          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_sqlite --parallel 2
+          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_sqlite --parallel 1

   selenium-postgresql:
     runs-on: ubuntu-latest

@@ -136,7 +136,7 @@ jobs:
       - name: Run Selenium tests
         working-directory: ./tests/
         run: |
-          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_postgres --parallel 2
+          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_postgres --parallel 1

   postgresql:
     strategy:
.github/workflows/screenshots.yml (2 changes, vendored)

@@ -33,7 +33,7 @@ jobs:

       - name: Run Selenium tests with screenshots
         working-directory: ./tests/
-        run: python -Wall runtests.py --verbosity=2 --noinput --selenium=chrome --headless --screenshots --settings=test_sqlite --parallel=2
+        run: python -Wall runtests.py --verbosity=2 --noinput --selenium=chrome --headless --screenshots --settings=test_sqlite --parallel=1

       - name: Cache oxipng
         uses: actions/cache@v4
.github/workflows/selenium.yml (4 changes, vendored)

@@ -35,7 +35,7 @@ jobs:
       - name: Run Selenium tests
         working-directory: ./tests/
         run: |
-          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_sqlite --parallel 2
+          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_sqlite --parallel 1

   selenium-postgresql:
     if: contains(github.event.pull_request.labels.*.name, 'selenium')

@@ -74,4 +74,4 @@ jobs:
       - name: Run Selenium tests
         working-directory: ./tests/
         run: |
-          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_postgres --parallel 2
+          python -Wall runtests.py --verbosity 2 --noinput --selenium=chrome --headless --settings=test_postgres --parallel 1
AUTHORS (1 change)

@@ -159,6 +159,7 @@ answer newbie questions, and generally made Django that much better:
     Ben Slavin <benjamin.slavin@gmail.com>
     Ben Sturmfels <ben@sturm.com.au>
     Bendegúz Csirmaz <csirmazbendeguz@gmail.com>
+    Benedict Etzel <developer@beheh.de>
     Berker Peksag <berker.peksag@gmail.com>
     Bernd Schlapsi
     Bernhard Essl <me@bernhardessl.com>
django/contrib/admin/sites.py

@@ -416,29 +416,27 @@ class AdminSite:
         """
         Display the login form for the given HttpRequest.
         """
-        if request.method == "GET" and self.has_permission(request):
-            # Already logged-in, redirect to admin index
-            index_path = reverse("admin:index", current_app=self.name)
-            return HttpResponseRedirect(index_path)
-
         # Since this module gets imported in the application's root package,
         # it cannot import models from other applications at the module level,
         # and django.contrib.admin.forms eventually imports User.
         from django.contrib.admin.forms import AdminAuthenticationForm
         from django.contrib.auth.views import LoginView

+        redirect_url = LoginView().get_redirect_url(request) or reverse(
+            "admin:index", current_app=self.name
+        )
+        if request.method == "GET" and self.has_permission(request):
+            # Already logged-in, redirect accordingly.
+            return HttpResponseRedirect(redirect_url)
+
         context = {
             **self.each_context(request),
             "title": _("Log in"),
             "subtitle": None,
             "app_path": request.get_full_path(),
             "username": request.user.get_username(),
+            REDIRECT_FIELD_NAME: redirect_url,
         }
-        if (
-            REDIRECT_FIELD_NAME not in request.GET
-            and REDIRECT_FIELD_NAME not in request.POST
-        ):
-            context[REDIRECT_FIELD_NAME] = reverse("admin:index", current_app=self.name)
         context.update(extra_context or {})

         defaults = {
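With the hunk above, AdminSite.login() resolves the redirect target before the already-authenticated check, so a safe "next" value is honoured instead of always falling back to the admin index. A minimal sketch of the resulting behaviour, assuming a standard project with the admin mounted at /admin/ (URLs and credentials are illustrative, not part of the diff):

    # Sketch only: exercising the new redirect with Django's test client.
    from django.contrib.auth.models import User
    from django.test import Client

    User.objects.create_superuser("admin", "admin@example.com", "secret")
    client = Client()
    client.login(username="admin", password="secret")

    # Before the change this always redirected to the admin index; now the
    # validated "next" target wins for an already-authenticated GET.
    response = client.get("/admin/login/?next=/admin/auth/user/")
    assert response.status_code == 302
    assert response["Location"] == "/admin/auth/user/"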
django/contrib/admin/static/admin/js/SelectFilter2.js

@@ -18,23 +18,10 @@ Requires core.js and SelectBox.js.
         from_box.setAttribute('aria-labelledby', field_id + '_from_label');
         from_box.setAttribute('aria-describedby', `${field_id}_helptext ${field_id}_choose_helptext`);

-        for (const p of from_box.parentNode.getElementsByTagName('p')) {
-            if (p.classList.contains("info")) {
-                // Remove <p class="info">, because it just gets in the way.
-                from_box.parentNode.removeChild(p);
-            } else if (p.classList.contains("help")) {
-                // Move help text up to the top so it isn't below the select
-                // boxes or wrapped off on the side to the right of the add
-                // button:
-                from_box.parentNode.insertBefore(p, from_box.parentNode.firstChild);
-            }
-        }
-
         // <div class="selector"> or <div class="selector stacked">
         const selector_div = quickElement('div', from_box.parentNode);
-        // Make sure the selector div is at the beginning so that the
-        // add link would be displayed to the right of the widget.
-        from_box.parentNode.prepend(selector_div);
+        // Make sure the selector div appears between the label and the add link.
+        from_box.parentNode.insertBefore(selector_div, from_box.nextSibling);
         selector_div.className = is_stacked ? 'selector stacked' : 'selector';

         // <div class="selector-available">
django/contrib/auth/checks.py

@@ -1,5 +1,4 @@
 from itertools import chain
-from types import MethodType

 from django.apps import apps
 from django.conf import settings

@@ -98,7 +97,7 @@ def check_user_model(app_configs, **kwargs):
             )
         )

-    if isinstance(cls().is_anonymous, MethodType):
+    if callable(cls().is_anonymous):
        errors.append(
            checks.Critical(
                "%s.is_anonymous must be an attribute or property rather than "

@@ -108,7 +107,7 @@ def check_user_model(app_configs, **kwargs):
                id="auth.C009",
            )
        )
-    if isinstance(cls().is_authenticated, MethodType):
+    if callable(cls().is_authenticated):
        errors.append(
            checks.Critical(
                "%s.is_authenticated must be an attribute or property rather "
django/contrib/auth/views.py

@@ -40,20 +40,28 @@ class RedirectURLMixin:
     def get_success_url(self):
         return self.get_redirect_url() or self.get_default_redirect_url()

-    def get_redirect_url(self):
-        """Return the user-originating redirect URL if it's safe."""
-        redirect_to = self.request.POST.get(
-            self.redirect_field_name, self.request.GET.get(self.redirect_field_name)
+    def get_redirect_url(self, request=None):
+        """Return the user-originating redirect URL if it's safe.
+
+        Optionally takes a request argument, allowing use outside class-based
+        views.
+        """
+        if request is None:
+            request = self.request
+        redirect_to = request.POST.get(
+            self.redirect_field_name, request.GET.get(self.redirect_field_name)
         )
         url_is_safe = url_has_allowed_host_and_scheme(
             url=redirect_to,
-            allowed_hosts=self.get_success_url_allowed_hosts(),
-            require_https=self.request.is_secure(),
+            allowed_hosts=self.get_success_url_allowed_hosts(request),
+            require_https=request.is_secure(),
         )
         return redirect_to if url_is_safe else ""

-    def get_success_url_allowed_hosts(self):
-        return {self.request.get_host(), *self.success_url_allowed_hosts}
+    def get_success_url_allowed_hosts(self, request=None):
+        if request is None:
+            request = self.request
+        return {request.get_host(), *self.success_url_allowed_hosts}

     def get_default_redirect_url(self):
         """Return the default redirect URL."""
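The optional request argument added above lets the safe-redirect logic be reused without a bound class-based view instance, which is exactly what the admin login change relies on. A sketch under that assumption (helper name is invented):

    # Sketch: reusing RedirectURLMixin-based LoginView outside the normal
    # class-based-view request cycle.
    from django.contrib.auth.views import LoginView

    def safe_next_url(request, fallback="/"):
        # get_redirect_url(request) validates the "next" value against the
        # request's host and scheme and returns "" when it is not safe.
        return LoginView().get_redirect_url(request) or fallback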
django/contrib/postgres/apps.py

@@ -65,6 +65,11 @@ class PostgresConfig(AppConfig):
                     3910: "django.contrib.postgres.fields.DateTimeRangeField",
                     3912: "django.contrib.postgres.fields.DateRangeField",
                     3926: "django.contrib.postgres.fields.BigIntegerRangeField",
+                    # PostgreSQL OIDs may vary depending on the
+                    # installation, especially for datatypes from
+                    # extensions, e.g. "hstore". In such cases, the
+                    # type_display attribute (psycopg 3.2+) should be used.
+                    "hstore": "django.contrib.postgres.fields.HStoreField",
                 }
             )
             if conn.connection is not None:
django/contrib/postgres/fields/array.py

@@ -70,7 +70,7 @@ class ArrayField(CheckPostgresInstalledMixin, CheckFieldDefaultMixin, Field):
                 )
             )
         else:
-            base_checks = self.base_field.check()
+            base_checks = self.base_field.check(**kwargs)
             if base_checks:
                 error_messages = "\n    ".join(
                     "%s (%s)" % (base_check.msg, base_check.id)
django/core/checks/registry.py

@@ -1,6 +1,7 @@
 from collections.abc import Iterable
 from itertools import chain

+from django.db import connections
 from django.utils.inspect import func_accepts_kwargs


@@ -84,6 +85,14 @@ class CheckRegistry:

         if tags is not None:
             checks = [check for check in checks if not set(check.tags).isdisjoint(tags)]
+        elif not databases:
+            # By default, 'database'-tagged checks are not run if an alias
+            # is not explicitly specified as they do more than mere static
+            # code analysis.
+            checks = [check for check in checks if Tags.database not in check.tags]
+
+        if databases is None:
+            databases = list(connections)

         for check in checks:
             new_errors = check(app_configs=app_configs, databases=databases)
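With the registry change above, 'database'-tagged checks are skipped when no alias is requested, and a missing databases argument now expands to every configured alias before the remaining checks run. A short sketch, assuming the usual CheckRegistry.run_checks() signature:

    # Illustrative only.
    from django.core.checks.registry import registry

    registry.run_checks()                    # 'database'-tagged checks are filtered out
    registry.run_checks(tags=["database"])   # databases defaults to list(connections)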
django/core/handlers/asgi.py

@@ -3,7 +3,7 @@ import logging
 import sys
 import tempfile
 import traceback
-from contextlib import aclosing
+from contextlib import aclosing, closing

 from asgiref.sync import ThreadSensitiveContext, sync_to_async

@@ -174,65 +174,41 @@ class ASGIHandler(base.BaseHandler):
             body_file = await self.read_body(receive)
         except RequestAborted:
             return
-        # Request is complete and can be served.
-        set_script_prefix(get_script_prefix(scope))
-        await signals.request_started.asend(sender=self.__class__, scope=scope)
-        # Get the request and check for basic issues.
-        request, error_response = self.create_request(scope, body_file)
-        if request is None:
-            body_file.close()
-            await self.send_response(error_response, send)
-            await sync_to_async(error_response.close)()
-            return
-
-        async def process_request(request, send):
-            response = await self.run_get_response(request)
-            try:
-                await self.send_response(response, send)
-            except asyncio.CancelledError:
-                # Client disconnected during send_response (ignore exception).
-                pass
-
-            return response
-
-        # Try to catch a disconnect while getting response.
-        tasks = [
-            # Check the status of these tasks and (optionally) terminate them
-            # in this order. The listen_for_disconnect() task goes first
-            # because it should not raise unexpected errors that would prevent
-            # us from cancelling process_request().
-            asyncio.create_task(self.listen_for_disconnect(receive)),
-            asyncio.create_task(process_request(request, send)),
-        ]
-        await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
-        # Now wait on both tasks (they may have both finished by now).
-        for task in tasks:
-            if task.done():
-                try:
-                    task.result()
-                except RequestAborted:
-                    # Ignore client disconnects.
-                    pass
-                except AssertionError:
-                    body_file.close()
-                    raise
-            else:
-                # Allow views to handle cancellation.
-                task.cancel()
-                try:
-                    await task
-                except asyncio.CancelledError:
-                    # Task re-raised the CancelledError as expected.
-                    pass
-
-        try:
-            response = tasks[1].result()
-        except asyncio.CancelledError:
-            await signals.request_finished.asend(sender=self.__class__)
-        else:
-            await sync_to_async(response.close)()
-
-        body_file.close()
+        with closing(body_file):
+            # Request is complete and can be served.
+            set_script_prefix(get_script_prefix(scope))
+            await signals.request_started.asend(sender=self.__class__, scope=scope)
+            # Get the request and check for basic issues.
+            request, error_response = self.create_request(scope, body_file)
+            if request is None:
+                body_file.close()
+                await self.send_response(error_response, send)
+                await sync_to_async(error_response.close)()
+                return
+
+            class RequestProcessed(Exception):
+                pass
+
+            response = None
+            try:
+                try:
+                    async with asyncio.TaskGroup() as tg:
+                        tg.create_task(self.listen_for_disconnect(receive))
+                        response = await self.run_get_response(request)
+                        await self.send_response(response, send)
+                        raise RequestProcessed
+                except* (RequestProcessed, RequestAborted):
+                    pass
+            except BaseExceptionGroup as exception_group:
+                if len(exception_group.exceptions) == 1:
+                    raise exception_group.exceptions[0]
+                raise
+
+            if response is None:
+                await signals.request_finished.asend(sender=self.__class__)
+            else:
+                await sync_to_async(response.close)()

     async def listen_for_disconnect(self, receive):
         """Listen for disconnect from the client."""
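The rewritten handler replaces the asyncio.wait bookkeeping with a TaskGroup plus a sentinel exception: the disconnect listener runs as a child task, the successful path raises the sentinel to tear the group down, and single-member exception groups are unwrapped. A standalone sketch of that pattern, not Django's actual code (requires Python 3.11+; names are illustrative):

    import asyncio

    class Done(Exception):
        """Sentinel raised to shut the group down once the response is sent."""

    async def watch_disconnect():
        await asyncio.sleep(3600)  # stand-in for waiting on receive()

    async def handle():
        response = None
        try:
            async with asyncio.TaskGroup() as tg:
                tg.create_task(watch_disconnect())
                response = "rendered response"  # stand-in for running the view
                raise Done  # cancels the watcher; TaskGroup wraps this in an ExceptionGroup
        except* Done:
            pass
        return response

    print(asyncio.run(handle()))  # -> rendered response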
django/core/management/commands/inspectdb.py

@@ -4,6 +4,7 @@ import re
 from django.core.management.base import BaseCommand, CommandError
 from django.db import DEFAULT_DB_ALIAS, connections
 from django.db.models.constants import LOOKUP_SEP
+from django.db.models.deletion import DatabaseOnDelete


 class Command(BaseCommand):

@@ -163,7 +164,9 @@ class Command(BaseCommand):
                         extra_params["unique"] = True

                     if is_relation:
-                        ref_db_column, ref_db_table = relations[column_name]
+                        ref_db_column, ref_db_table, db_on_delete = relations[
+                            column_name
+                        ]
                         if extra_params.pop("unique", False) or extra_params.get(
                             "primary_key"
                         ):

@@ -191,6 +194,8 @@ class Command(BaseCommand):
                                 model_name.lower(),
                                 att_name,
                             )
+                        if db_on_delete and isinstance(db_on_delete, DatabaseOnDelete):
+                            extra_params["on_delete"] = f"models.{db_on_delete}"
                         used_relations.add(rel_to)
                     else:
                         # Calling `get_field_type` to get the field type string

@@ -227,8 +232,12 @@ class Command(BaseCommand):
                         "" if "." in field_type else "models.",
                         field_type,
                     )
+                    on_delete_qualname = extra_params.pop("on_delete", None)
                     if field_type.startswith(("ForeignKey(", "OneToOneField(")):
-                        field_desc += ", models.DO_NOTHING"
+                        if on_delete_qualname:
+                            field_desc += f", {on_delete_qualname}"
+                        else:
+                            field_desc += ", models.DO_NOTHING"

                     # Add comment.
                     if connection.features.supports_comments and row.comment:

@@ -350,21 +359,15 @@ class Command(BaseCommand):
        if field_type in {"CharField", "TextField"} and row.collation:
            field_params["db_collation"] = row.collation

-        if field_type == "DecimalField":
-            if row.precision is None or row.scale is None:
-                field_notes.append(
-                    "max_digits and decimal_places have been guessed, as this "
-                    "database handles decimal fields as float"
-                )
-                field_params["max_digits"] = (
-                    row.precision if row.precision is not None else 10
-                )
-                field_params["decimal_places"] = (
-                    row.scale if row.scale is not None else 5
-                )
-            else:
-                field_params["max_digits"] = row.precision
-                field_params["decimal_places"] = row.scale
+        if field_type == "DecimalField" and (
+            # This can generate DecimalFields with only one of max_digits and
+            # decimal_fields specified. This configuration would be incorrect,
+            # but nothing more correct could be generated.
+            row.precision is not None
+            or row.scale is not None
+        ):
+            field_params["max_digits"] = row.precision
+            field_params["decimal_places"] = row.scale

        return field_type, field_params, field_notes
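Taken together, these inspectdb hunks carry database-level ON DELETE rules into the generated ForeignKey declarations and stop inventing precision for DecimalFields that have none. A hypothetical sketch of what generated output might now look like (table, model, and column names are invented; exact output depends on the backend):

    from django.db import models

    class Invoice(models.Model):
        # Emitted when the FK is declared ON DELETE CASCADE in the database.
        customer = models.ForeignKey("Customer", models.DB_CASCADE)
        # No max_digits/decimal_places when the backend reports none.
        total = models.DecimalField()

        class Meta:
            managed = False
            db_table = "invoice"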
django/db/backends/base/features.py

@@ -383,6 +383,9 @@ class BaseDatabaseFeatures:
     # Does the backend support unlimited character columns?
     supports_unlimited_charfield = False

+    # Does the backend support numeric columns with no precision?
+    supports_no_precision_decimalfield = False
+
     # Does the backend support native tuple lookups (=, >, <, IN)?
     supports_tuple_lookups = True

django/db/backends/base/introspection.py

@@ -1,5 +1,7 @@
 from collections import namedtuple

+from django.db.models import DB_CASCADE, DB_SET_DEFAULT, DB_SET_NULL, DO_NOTHING
+
 # Structure returned by DatabaseIntrospection.get_table_list()
 TableInfo = namedtuple("TableInfo", ["name", "type"])

@@ -15,6 +17,13 @@ class BaseDatabaseIntrospection:
     """Encapsulate backend-specific introspection utilities."""

     data_types_reverse = {}
+    on_delete_types = {
+        "CASCADE": DB_CASCADE,
+        "NO ACTION": DO_NOTHING,
+        "SET DEFAULT": DB_SET_DEFAULT,
+        "SET NULL": DB_SET_NULL,
+        # DB_RESTRICT - "RESTRICT" is not supported.
+    }

     def __init__(self, connection):
         self.connection = connection

@@ -169,8 +178,11 @@ class BaseDatabaseIntrospection:

     def get_relations(self, cursor, table_name):
         """
-        Return a dictionary of {field_name: (field_name_other_table,
-        other_table)} representing all foreign keys in the given table.
+        Return a dictionary of
+        {
+            field_name: (field_name_other_table, other_table, db_on_delete)
+        }
+        representing all foreign keys in the given table.
         """
         raise NotImplementedError(
             "subclasses of BaseDatabaseIntrospection may require a "
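The get_relations() contract now returns a three-tuple per foreign key, with the third element being the database-level on_delete (or None when the rule is unrecognised). A small sketch of the new shape (table and column names invented):

    from django.db import connection, models

    with connection.cursor() as cursor:
        relations = connection.introspection.get_relations(cursor, "invoice")
    # e.g. {"customer_id": ("id", "customer", models.DB_CASCADE),
    #       "group_id": ("id", "auth_group", None)}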
django/db/backends/mysql/base.py

@@ -334,6 +334,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
                 for column_name, (
                     referenced_column_name,
                     referenced_table_name,
+                    _,
                 ) in relations.items():
                     cursor.execute(
                         """
django/db/backends/mysql/introspection.py

@@ -196,24 +196,36 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):

     def get_relations(self, cursor, table_name):
         """
-        Return a dictionary of {field_name: (field_name_other_table,
-        other_table)} representing all foreign keys in the given table.
+        Return a dictionary of
+        {
+            field_name: (field_name_other_table, other_table, db_on_delete)
+        }
+        representing all foreign keys in the given table.
         """
         cursor.execute(
             """
-            SELECT column_name, referenced_column_name, referenced_table_name
-            FROM information_schema.key_column_usage
-            WHERE table_name = %s
-                AND table_schema = DATABASE()
-                AND referenced_table_schema = DATABASE()
-                AND referenced_table_name IS NOT NULL
-                AND referenced_column_name IS NOT NULL
+            SELECT
+                kcu.column_name,
+                kcu.referenced_column_name,
+                kcu.referenced_table_name,
+                rc.delete_rule
+            FROM
+                information_schema.key_column_usage kcu
+            JOIN
+                information_schema.referential_constraints rc
+                ON rc.constraint_name = kcu.constraint_name
+                AND rc.constraint_schema = kcu.constraint_schema
+            WHERE kcu.table_name = %s
+                AND kcu.table_schema = DATABASE()
+                AND kcu.referenced_table_schema = DATABASE()
+                AND kcu.referenced_table_name IS NOT NULL
+                AND kcu.referenced_column_name IS NOT NULL
             """,
             [table_name],
         )
         return {
-            field_name: (other_field, other_table)
-            for field_name, other_field, other_table in cursor.fetchall()
+            field_name: (other_field, other_table, self.on_delete_types.get(on_delete))
+            for field_name, other_field, other_table, on_delete in cursor.fetchall()
         }

     def get_storage_engine(self, cursor, table_name):
django/db/backends/oracle/base.py

@@ -106,6 +106,12 @@ class _UninitializedOperatorsDescriptor:
         return instance.__dict__["operators"]


+def _get_decimal_column(data):
+    if data["max_digits"] is None and data["decimal_places"] is None:
+        return "NUMBER"
+    return "NUMBER(%(max_digits)s, %(decimal_places)s)" % data
+
+
 class DatabaseWrapper(BaseDatabaseWrapper):
     vendor = "oracle"
     display_name = "Oracle"

@@ -125,7 +131,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
         "CharField": "NVARCHAR2(%(max_length)s)",
         "DateField": "DATE",
         "DateTimeField": "TIMESTAMP",
-        "DecimalField": "NUMBER(%(max_digits)s, %(decimal_places)s)",
+        "DecimalField": _get_decimal_column,
         "DurationField": "INTERVAL DAY(9) TO SECOND(6)",
         "FileField": "NVARCHAR2(%(max_length)s)",
         "FilePathField": "NVARCHAR2(%(max_length)s)",
django/db/backends/oracle/features.py

@@ -79,6 +79,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     supports_json_negative_indexing = False
     supports_collation_on_textfield = False
     supports_on_delete_db_default = False
+    supports_no_precision_decimalfield = True
     test_now_utc_template = "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
     django_test_expected_failures = {
         # A bug in Django/oracledb with respect to string handling (#23843).
django/db/backends/oracle/introspection.py

@@ -194,14 +194,21 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 comment,
             ) = field_map[name]
             name %= {}  # oracledb, for some reason, doubles percent signs.
+            if desc[1] == oracledb.NUMBER and desc[5] == -127 and desc[4] == 0:
+                # DecimalField with no precision.
+                precision = None
+                scale = None
+            else:
+                precision = desc[4] or 0
+                scale = desc[5] or 0
             description.append(
                 FieldInfo(
                     self.identifier_converter(name),
                     desc[1],
                     display_size,
                     desc[3],
-                    desc[4] or 0,
-                    desc[5] or 0,
+                    precision,
+                    scale,
                     *desc[6:],
                     default,
                     collation,

@@ -254,13 +261,16 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):

     def get_relations(self, cursor, table_name):
         """
-        Return a dictionary of {field_name: (field_name_other_table,
-        other_table)} representing all foreign keys in the given table.
+        Return a dictionary of
+        {
+            field_name: (field_name_other_table, other_table, db_on_delete)
+        }
+        representing all foreign keys in the given table.
         """
         table_name = table_name.upper()
         cursor.execute(
             """
-            SELECT ca.column_name, cb.table_name, cb.column_name
+            SELECT ca.column_name, cb.table_name, cb.column_name, user_constraints.delete_rule
             FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb
             WHERE user_constraints.table_name = %s AND
                   user_constraints.constraint_name = ca.constraint_name AND

@@ -273,8 +283,14 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             self.identifier_converter(field_name): (
                 self.identifier_converter(rel_field_name),
                 self.identifier_converter(rel_table_name),
+                self.on_delete_types.get(on_delete),
             )
-            for field_name, rel_table_name, rel_field_name in cursor.fetchall()
+            for (
+                field_name,
+                rel_table_name,
+                rel_field_name,
+                on_delete,
+            ) in cursor.fetchall()
         }

     def get_primary_key_columns(self, cursor, table_name):
django/db/backends/postgresql/base.py

@@ -8,6 +8,7 @@ import asyncio
 import threading
 import warnings
 from contextlib import contextmanager
+from functools import lru_cache

 from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured

@@ -29,6 +30,7 @@ except ImportError:
     raise ImproperlyConfigured("Error loading psycopg2 or psycopg module")


+@lru_cache
 def psycopg_version():
     version = Database.__version__.split(" ", 1)[0]
     return get_version_tuple(version)

@@ -87,6 +89,12 @@ def _get_varchar_column(data):
     return "varchar(%(max_length)s)" % data


+def _get_decimal_column(data):
+    if data["max_digits"] is None and data["decimal_places"] is None:
+        return "numeric"
+    return "numeric(%(max_digits)s, %(decimal_places)s)" % data
+
+
 class DatabaseWrapper(BaseDatabaseWrapper):
     vendor = "postgresql"
     display_name = "PostgreSQL"

@@ -103,7 +111,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
         "CharField": _get_varchar_column,
         "DateField": "date",
         "DateTimeField": "timestamp with time zone",
-        "DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)",
+        "DecimalField": _get_decimal_column,
         "DurationField": "interval",
         "FileField": "varchar(%(max_length)s)",
         "FilePathField": "varchar(%(max_length)s)",
django/db/backends/postgresql/features.py

@@ -68,6 +68,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     supports_covering_indexes = True
     supports_stored_generated_columns = True
     supports_nulls_distinct_unique_constraints = True
+    supports_no_precision_decimalfield = True
     can_rename_index = True
     test_collations = {
         "deterministic": "C",
django/db/backends/postgresql/introspection.py

@@ -3,7 +3,8 @@ from collections import namedtuple
 from django.db.backends.base.introspection import BaseDatabaseIntrospection
 from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
 from django.db.backends.base.introspection import TableInfo as BaseTableInfo
-from django.db.models import Index
+from django.db.backends.postgresql.base import psycopg_version
+from django.db.models import DB_CASCADE, DB_SET_DEFAULT, DB_SET_NULL, DO_NOTHING, Index

 FieldInfo = namedtuple("FieldInfo", [*BaseFieldInfo._fields, "is_autofield", "comment"])
 TableInfo = namedtuple("TableInfo", [*BaseTableInfo._fields, "comment"])

@@ -38,6 +39,14 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):

     ignored_tables = []

+    on_delete_types = {
+        "a": DO_NOTHING,
+        "c": DB_CASCADE,
+        "d": DB_SET_DEFAULT,
+        "n": DB_SET_NULL,
+        # DB_RESTRICT - "r" is not supported.
+    }
+
     def get_field_type(self, data_type, description):
         field_type = super().get_field_type(data_type, description)
         if description.is_autofield or (

@@ -112,15 +121,26 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         cursor.execute(
             "SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)
         )
+
+        # PostgreSQL OIDs may vary depending on the installation, especially
+        # for datatypes from extensions, e.g. "hstore". In such cases, the
+        # type_display attribute (psycopg 3.2+) should be used.
+        type_display_available = psycopg_version() >= (3, 2)
         return [
             FieldInfo(
                 line.name,
-                line.type_code,
+                (
+                    line.type_display
+                    if type_display_available and line.type_display == "hstore"
+                    else line.type_code
+                ),
                 # display_size is always None on psycopg2.
                 line.internal_size if line.display_size is None else line.display_size,
                 line.internal_size,
-                line.precision,
-                line.scale,
+                # precision and scale are always 2^16 - 1 on psycopg2 for
+                # DecimalFields with no precision.
+                None if line.precision == 2**16 - 1 else line.precision,
+                None if line.scale == 2**16 - 1 else line.scale,
                 *field_map[line.name],
             )
             for line in cursor.description

@@ -154,12 +174,15 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):

     def get_relations(self, cursor, table_name):
         """
-        Return a dictionary of {field_name: (field_name_other_table,
-        other_table)} representing all foreign keys in the given table.
+        Return a dictionary of
+        {
+            field_name: (field_name_other_table, other_table, db_on_delete)
+        }
+        representing all foreign keys in the given table.
         """
         cursor.execute(
             """
-            SELECT a1.attname, c2.relname, a2.attname
+            SELECT a1.attname, c2.relname, a2.attname, con.confdeltype
             FROM pg_constraint con
             LEFT JOIN pg_class c1 ON con.conrelid = c1.oid
             LEFT JOIN pg_class c2 ON con.confrelid = c2.oid

@@ -175,7 +198,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             """,
             [table_name],
         )
-        return {row[0]: (row[2], row[1]) for row in cursor.fetchall()}
+        return {
+            row[0]: (row[2], row[1], self.on_delete_types.get(row[3]))
+            for row in cursor.fetchall()
+        }

     def get_constraints(self, cursor, table_name):
         """
django/db/backends/sqlite3/features.py

@@ -55,6 +55,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     insert_test_table_with_defaults = 'INSERT INTO {} ("null") VALUES (1)'
     supports_default_keyword_in_insert = False
     supports_unlimited_charfield = True
+    supports_no_precision_decimalfield = True
     can_return_columns_from_insert = True
     can_return_rows_from_bulk_insert = True
     can_return_rows_from_update = True
django/db/backends/sqlite3/introspection.py

@@ -153,20 +153,27 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):

     def get_relations(self, cursor, table_name):
         """
-        Return a dictionary of {column_name: (ref_column_name, ref_table_name)}
+        Return a dictionary of
+        {column_name: (ref_column_name, ref_table_name, db_on_delete)}
         representing all foreign keys in the given table.
         """
         cursor.execute(
             "PRAGMA foreign_key_list(%s)" % self.connection.ops.quote_name(table_name)
         )
         return {
-            column_name: (ref_column_name, ref_table_name)
+            column_name: (
+                ref_column_name,
+                ref_table_name,
+                self.on_delete_types.get(on_delete),
+            )
             for (
                 _,
                 _,
                 ref_table_name,
                 column_name,
                 ref_column_name,
+                _,
+                on_delete,
                 *_,
             ) in cursor.fetchall()
         }

@@ -407,7 +414,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                     "check": False,
                     "index": False,
                 }
-                for index, (column_name, (ref_column_name, ref_table_name)) in relations
+                for index, (
+                    column_name,
+                    (ref_column_name, ref_table_name, _),
+                ) in relations
             }
         )
         return constraints
@ -1,5 +1,6 @@
|
||||||
import datetime
|
import datetime
|
||||||
import decimal
|
import decimal
|
||||||
|
import sqlite3
|
||||||
import uuid
|
import uuid
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
|
|
@ -143,16 +144,15 @@ class DatabaseOperations(BaseDatabaseOperations):
|
||||||
"""
|
"""
|
||||||
Only for last_executed_query! Don't use this to execute SQL queries!
|
Only for last_executed_query! Don't use this to execute SQL queries!
|
||||||
"""
|
"""
|
||||||
# This function is limited both by SQLITE_LIMIT_VARIABLE_NUMBER (the
|
connection = self.connection.connection
|
||||||
# number of parameters, default = 999) and SQLITE_MAX_COLUMN (the
|
variable_limit = self.connection.features.max_query_params
|
||||||
# number of return values, default = 2000). Since Python's sqlite3
|
column_limit = connection.getlimit(sqlite3.SQLITE_LIMIT_COLUMN)
|
||||||
# module doesn't expose the get_limit() C API, assume the default
|
batch_size = min(variable_limit, column_limit)
|
||||||
# limits are in effect and split the work in batches if needed.
|
|
||||||
BATCH_SIZE = 999
|
if len(params) > batch_size:
|
||||||
if len(params) > BATCH_SIZE:
|
|
||||||
results = ()
|
results = ()
|
||||||
for index in range(0, len(params), BATCH_SIZE):
|
for index in range(0, len(params), batch_size):
|
||||||
chunk = params[index : index + BATCH_SIZE]
|
chunk = params[index : index + batch_size]
|
||||||
results += self._quote_params_for_last_executed_query(chunk)
|
results += self._quote_params_for_last_executed_query(chunk)
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
|
||||||
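The batching above now asks the live sqlite3 connection for its real limits instead of assuming the compile-time defaults. The relevant stdlib API (available since Python 3.11), shown standalone:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    print(conn.getlimit(sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER))  # often much higher than the old 999 assumption
    print(conn.getlimit(sqlite3.SQLITE_LIMIT_COLUMN))           # typically 2000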
django/db/migrations/loader.py

@@ -109,11 +109,11 @@ class MigrationLoader:
                 if was_loaded:
                     reload(module)
             self.migrated_apps.add(app_config.label)
-            migration_names = {
+            migration_names = [
                 name
                 for _, name, is_pkg in pkgutil.iter_modules(module.__path__)
                 if not is_pkg and name[0] not in "_~"
-            }
+            ]
             # Load migrations
             for migration_name in migration_names:
                 migration_path = "%s.%s" % (module_name, migration_name)
django/db/migrations/serializer.py

@@ -222,6 +222,17 @@ class FunctoolsPartialSerializer(BaseSerializer):
         )


+class GenericAliasSerializer(BaseSerializer):
+    def serialize(self):
+        imports = set()
+        # Avoid iterating self.value, because it returns itself.
+        # https://github.com/python/cpython/issues/103450
+        for item in self.value.__args__:
+            _, item_imports = serializer_factory(item).serialize()
+            imports.update(item_imports)
+        return repr(self.value), imports
+
+
 class IterableSerializer(BaseSerializer):
     def serialize(self):
         imports = set()

@@ -364,6 +375,7 @@ class Serializer:
         decimal.Decimal: DecimalSerializer,
         (functools.partial, functools.partialmethod): FunctoolsPartialSerializer,
         FUNCTION_TYPES: FunctionTypeSerializer,
+        types.GenericAlias: GenericAliasSerializer,
         collections.abc.Iterable: IterableSerializer,
         (COMPILED_REGEX_TYPE, RegexObject): RegexSerializer,
         uuid.UUID: UUIDSerializer,
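The new serializer leans on the fact that repr() of a parametrized generic is already valid source code, so only the imports of the type arguments need collecting. A quick standalone illustration:

    import types

    value = dict[str, list[int]]
    assert isinstance(value, types.GenericAlias)
    # repr() round-trips to source, which is what the serializer writes out.
    assert repr(value) == "dict[str, list[int]]"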
django/db/models/base.py

@@ -1801,7 +1801,7 @@ class Model(AltersData, metaclass=ModelBase):
         meta = cls._meta
         pk = meta.pk

-        if not isinstance(pk, CompositePrimaryKey):
+        if meta.proxy or not isinstance(pk, CompositePrimaryKey):
             return errors

         seen_columns = defaultdict(list)
django/db/models/constraints.py

@@ -51,9 +51,12 @@ class BaseConstraint:
     def _expression_refs_exclude(cls, model, expression, exclude):
         get_field = model._meta.get_field
         for field_name, *__ in model._get_expr_references(expression):
-            if field_name in exclude:
+            if field_name == "pk":
+                field = model._meta.pk
+            else:
+                field = get_field(field_name)
+            if field_name in exclude or field.name in exclude:
                 return True
-            field = get_field(field_name)
             if field.generated and cls._expression_refs_exclude(
                 model, field.expression, exclude
             ):
django/db/models/fields/__init__.py

@@ -1725,54 +1725,84 @@ class DecimalField(Field):
         return errors

     def _check_decimal_places(self):
-        try:
-            decimal_places = int(self.decimal_places)
-            if decimal_places < 0:
-                raise ValueError()
-        except TypeError:
-            return [
-                checks.Error(
-                    "DecimalFields must define a 'decimal_places' attribute.",
-                    obj=self,
-                    id="fields.E130",
-                )
-            ]
-        except ValueError:
-            return [
-                checks.Error(
-                    "'decimal_places' must be a non-negative integer.",
-                    obj=self,
-                    id="fields.E131",
-                )
-            ]
-        else:
-            return []
+        if self.decimal_places is None:
+            if (
+                not connection.features.supports_no_precision_decimalfield
+                and "supports_no_precision_decimalfield"
+                not in self.model._meta.required_db_features
+            ):
+                return [
+                    checks.Error(
+                        "DecimalFields must define a 'decimal_places' attribute.",
+                        obj=self,
+                        id="fields.E130",
+                    )
+                ]
+            elif self.max_digits is not None:
+                return [
+                    checks.Error(
+                        "DecimalField’s max_digits and decimal_places must both "
+                        "be defined or both omitted.",
+                        obj=self,
+                        id="fields.E135",
+                    ),
+                ]
+        else:
+            try:
+                decimal_places = int(self.decimal_places)
+                if decimal_places < 0:
+                    raise ValueError()
+            except ValueError:
+                return [
+                    checks.Error(
+                        "'decimal_places' must be a non-negative integer.",
+                        obj=self,
+                        id="fields.E131",
+                    )
+                ]
+        return []

     def _check_max_digits(self):
-        try:
-            max_digits = int(self.max_digits)
-            if max_digits <= 0:
-                raise ValueError()
-        except TypeError:
-            return [
-                checks.Error(
-                    "DecimalFields must define a 'max_digits' attribute.",
-                    obj=self,
-                    id="fields.E132",
-                )
-            ]
-        except ValueError:
-            return [
-                checks.Error(
-                    "'max_digits' must be a positive integer.",
-                    obj=self,
-                    id="fields.E133",
-                )
-            ]
-        else:
-            return []
+        if self.max_digits is None:
+            if (
+                not connection.features.supports_no_precision_decimalfield
+                and "supports_no_precision_decimalfield"
+                not in self.model._meta.required_db_features
+            ):
+                return [
+                    checks.Error(
+                        "DecimalFields must define a 'max_digits' attribute.",
+                        obj=self,
+                        id="fields.E132",
+                    )
+                ]
+            elif self.decimal_places is not None:
+                return [
+                    checks.Error(
+                        "DecimalField’s max_digits and decimal_places must both "
+                        "be defined or both omitted.",
+                        obj=self,
+                        id="fields.E135",
+                    ),
+                ]
+        else:
+            try:
+                max_digits = int(self.max_digits)
+                if max_digits <= 0:
+                    raise ValueError()
+            except ValueError:
+                return [
+                    checks.Error(
+                        "'max_digits' must be a positive integer.",
+                        obj=self,
+                        id="fields.E133",
+                    )
+                ]
+        return []

     def _check_decimal_places_and_max_digits(self, **kwargs):
+        if self.decimal_places is None or self.max_digits is None:
+            return []
         if int(self.decimal_places) > int(self.max_digits):
             return [
                 checks.Error(
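With the relaxed checks and the new feature flag, a DecimalField may omit both max_digits and decimal_places on backends that allow unbounded numeric columns; supplying only one of the two now raises fields.E135. A hedged sketch under those assumptions (model and app label are invented):

    from django.db import models

    class Measurement(models.Model):
        # Allowed where supports_no_precision_decimalfield is True (per this
        # diff: SQLite, PostgreSQL, Oracle). Elsewhere the E130/E132 checks
        # still fire unless the feature is declared in required_db_features.
        value = models.DecimalField()

        class Meta:
            required_db_features = {"supports_no_precision_decimalfield"}
            app_label = "example"  # assumption so the sketch is self-contained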
django/db/models/fields/json.py

@@ -12,6 +12,7 @@ from django.db.models.lookups import (
     PostgresOperatorLookup,
     Transform,
 )
+from django.utils.deconstruct import deconstructible
 from django.utils.deprecation import RemovedInDjango70Warning, django_file_prefixes
 from django.utils.translation import gettext_lazy as _

@@ -150,6 +151,7 @@ class JSONField(CheckFieldDefaultMixin, Field):
         )


+@deconstructible(path="django.db.models.JSONNull")
 class JSONNull(expressions.Value):
     """Represent JSON `null` primitive."""

django/db/models/query.py

@@ -46,6 +46,8 @@ MAX_GET_RESULTS = 21
 # The maximum number of items to display in a QuerySet.__repr__
 REPR_OUTPUT_SIZE = 20

+PROHIBITED_FILTER_KWARGS = frozenset(["_connector", "_negated"])
+

 class BaseIterable:
     def __init__(

@@ -840,8 +842,7 @@ class QuerySet(AltersData):
                 )
                 for obj_with_pk, results in zip(objs_with_pk, returned_columns):
                     for result, field in zip(results, opts.db_returning_fields):
-                        if field != opts.pk:
-                            setattr(obj_with_pk, field.attname, result)
+                        setattr(obj_with_pk, field.attname, result)
                 for obj_with_pk in objs_with_pk:
                     obj_with_pk._state.adding = False
                     obj_with_pk._state.db = self.db

@@ -1645,6 +1646,9 @@ class QuerySet(AltersData):
         return clone

     def _filter_or_exclude_inplace(self, negate, args, kwargs):
+        if invalid_kwargs := PROHIBITED_FILTER_KWARGS.intersection(kwargs):
+            invalid_kwargs_str = ", ".join(f"'{k}'" for k in sorted(invalid_kwargs))
+            raise TypeError(f"The following kwargs are invalid: {invalid_kwargs_str}")
         if negate:
             self._query.add_q(~Q(*args, **kwargs))
         else:
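With the guard above, leaking Q's private keyword arguments into filter() or exclude() fails fast instead of being treated as a field lookup. A quick sketch (User is just an example model):

    from django.contrib.auth.models import User

    User.objects.filter(_connector="OR")
    # TypeError: The following kwargs are invalid: '_connector'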
django/db/models/query_utils.py

@@ -48,8 +48,12 @@ class Q(tree.Node):
     XOR = "XOR"
     default = AND
     conditional = True
+    connectors = (None, AND, OR, XOR)

     def __init__(self, *args, _connector=None, _negated=False, **kwargs):
+        if _connector not in self.connectors:
+            connector_reprs = ", ".join(f"{conn!r}" for conn in self.connectors[1:])
+            raise ValueError(f"_connector must be one of {connector_reprs}, or None.")
         super().__init__(
             children=[*args, *sorted(kwargs.items())],
             connector=_connector,
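Q itself now validates _connector at construction time, so a typo surfaces immediately rather than producing malformed SQL later:

    from django.db.models import Q

    Q(name="x", _connector="AND")   # fine
    Q(name="x", _connector="NAND")
    # ValueError: _connector must be one of 'AND', 'OR', 'XOR', or None.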
|
|
@ -22,6 +22,28 @@ NONE_ID = _make_id(None)
|
||||||
NO_RECEIVERS = object()
|
NO_RECEIVERS = object()
|
||||||
|
|
||||||
|
|
||||||
|
async def _gather(*coros):
|
||||||
|
if len(coros) == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
if len(coros) == 1:
|
||||||
|
return [await coros[0]]
|
||||||
|
|
||||||
|
async def run(i, coro):
|
||||||
|
results[i] = await coro
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with asyncio.TaskGroup() as tg:
|
||||||
|
results = [None] * len(coros)
|
||||||
|
for i, coro in enumerate(coros):
|
||||||
|
tg.create_task(run(i, coro))
|
||||||
|
return results
|
||||||
|
except BaseExceptionGroup as exception_group:
|
||||||
|
if len(exception_group.exceptions) == 1:
|
||||||
|
raise exception_group.exceptions[0]
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
class Signal:
|
class Signal:
|
||||||
"""
|
"""
|
||||||
Base class for all signals
|
Base class for all signals
|
||||||
|
|
@ -186,7 +208,7 @@ class Signal:

         If any receivers are asynchronous, they are called after all the
         synchronous receivers via a single call to async_to_sync(). They are
-        also executed concurrently with asyncio.gather().
+        also executed concurrently with asyncio.TaskGroup().

         Arguments:

@ -211,7 +233,7 @@ class Signal:
         if async_receivers:

             async def asend():
-                async_responses = await asyncio.gather(
+                async_responses = await _gather(
                     *(
                         receiver(signal=self, sender=sender, **named)
                         for receiver in async_receivers

@ -235,7 +257,7 @@ class Signal:
         sync_to_async() adaption before executing any asynchronous receivers.

         If any receivers are asynchronous, they are grouped and executed
-        concurrently with asyncio.gather().
+        concurrently with asyncio.TaskGroup().

         Arguments:

@ -268,9 +290,9 @@ class Signal:
            async def sync_send():
                return []

-        responses, async_responses = await asyncio.gather(
+        responses, async_responses = await _gather(
             sync_send(),
-            asyncio.gather(
+            _gather(
                 *(
                     receiver(signal=self, sender=sender, **named)
                     for receiver in async_receivers

@ -294,7 +316,7 @@ class Signal:

         If any receivers are asynchronous, they are called after all the
         synchronous receivers via a single call to async_to_sync(). They are
-        also executed concurrently with asyncio.gather().
+        also executed concurrently with asyncio.TaskGroup().

         Arguments:

@ -340,7 +362,7 @@ class Signal:
                    return response

            async def asend():
-                async_responses = await asyncio.gather(
+                async_responses = await _gather(
                     *(
                         asend_and_wrap_exception(receiver)
                         for receiver in async_receivers

@ -359,7 +381,7 @@ class Signal:
         sync_to_async() adaption before executing any asynchronous receivers.

         If any receivers are asynchronous, they are grouped and executed
-        concurrently with asyncio.gather.
+        concurrently with asyncio.TaskGroup.

         Arguments:

@ -414,9 +436,9 @@ class Signal:
                    return err
                return response

-        responses, async_responses = await asyncio.gather(
+        responses, async_responses = await _gather(
             sync_send(),
-            asyncio.gather(
+            _gather(
                 *(asend_and_wrap_exception(receiver) for receiver in async_receivers),
             ),
         )
@ -192,7 +192,9 @@ class BoundField(RenderableFieldMixin):
         if id_:
             id_for_label = widget.id_for_label(id_)
             if id_for_label:
-                attrs = {**(attrs or {}), "for": id_for_label}
+                attrs = attrs or {}
+                if tag != "legend":
+                    attrs = {**attrs, "for": id_for_label}
         if self.field.required and hasattr(self.form, "required_css_class"):
             attrs = attrs or {}
             if "class" in attrs:
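The practical effect is that legend_tag() no longer emits a ``for`` attribute, which is not valid on ``<legend>``, while label_tag() keeps it. A rough sketch, assuming a configured Django settings module and a trivial form (output shown approximately):

    from django import forms


    class ContactForm(forms.Form):
        name = forms.CharField()


    bound = ContactForm()["name"]
    print(bound.label_tag())   # e.g. <label for="id_name">Name:</label>
    print(bound.legend_tag())  # e.g. <legend>Name:</legend> -- no "for" after this change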
@ -22,7 +22,7 @@ from django.utils import timezone
 from django.utils.datastructures import CaseInsensitiveMapping
 from django.utils.encoding import iri_to_uri
 from django.utils.functional import cached_property
-from django.utils.http import content_disposition_header, http_date
+from django.utils.http import MAX_URL_LENGTH, content_disposition_header, http_date
 from django.utils.regex_helper import _lazy_re_compile

 _charset_from_content_type_re = _lazy_re_compile(

@ -631,7 +631,12 @@ class HttpResponseRedirectBase(HttpResponse):
     def __init__(self, redirect_to, preserve_request=False, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self["Location"] = iri_to_uri(redirect_to)
-        parsed = urlsplit(str(redirect_to))
+        redirect_to_str = str(redirect_to)
+        if len(redirect_to_str) > MAX_URL_LENGTH:
+            raise DisallowedRedirect(
+                f"Unsafe redirect exceeding {MAX_URL_LENGTH} characters"
+            )
+        parsed = urlsplit(redirect_to_str)
         if preserve_request:
             self.status_code = self.status_code_preserve_request
         if parsed.scheme and parsed.scheme not in self.allowed_schemes:
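With the cap in place, building a redirect whose target exceeds MAX_URL_LENGTH fails fast instead of spending time in NFKC normalization (the CVE-2025-64458 scenario). A minimal sketch, assuming a configured Django settings module and a version containing this fix; MAX_URL_LENGTH is an internal constant in django.utils.http:

    from django.core.exceptions import DisallowedRedirect
    from django.http import HttpResponseRedirect
    from django.utils.http import MAX_URL_LENGTH

    try:
        HttpResponseRedirect("/go/" + "a" * MAX_URL_LENGTH)  # longer than the cap
    except DisallowedRedirect as exc:
        print(exc)  # Unsafe redirect exceeding <MAX_URL_LENGTH> characters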
@ -4,6 +4,7 @@ import html
 import json
 import re
 import warnings
+from collections import deque
 from collections.abc import Mapping
 from html.parser import HTMLParser
 from urllib.parse import parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit

@ -297,6 +298,7 @@ class Urlizer:
     simple_url_re = _lazy_re_compile(r"^https?://\[?\w", re.IGNORECASE)
     simple_url_2_re = _lazy_re_compile(
         rf"^www\.|^(?!http)(?:{DomainNameValidator.hostname_re})"
+        rf"(?:{DomainNameValidator.domain_re})"
         r"\.(com|edu|gov|int|mil|net|org)($|/.*)$",
         re.IGNORECASE,
     )

@ -428,7 +430,7 @@ class Urlizer:
         # Strip all opening wrapping punctuation.
         middle = word.lstrip(self.wrapping_punctuation_openings)
         lead = word[: len(word) - len(middle)]
-        trail = ""
+        trail = deque()

         # Continue trimming until middle remains unchanged.
         trimmed_something = True

@ -441,7 +443,7 @@ class Urlizer:
                     rstripped = middle.rstrip(closing)
                     if rstripped != middle:
                         strip = counts[closing] - counts[opening]
-                        trail = middle[-strip:]
+                        trail.appendleft(middle[-strip:])
                         middle = middle[:-strip]
                         trimmed_something = True
                         counts[closing] -= strip

@ -452,7 +454,7 @@ class Urlizer:
             else:
                 rstripped = middle.rstrip(self.trailing_punctuation_chars_no_semicolon)
                 if rstripped != middle:
-                    trail = middle[len(rstripped) :] + trail
+                    trail.appendleft(middle[len(rstripped) :])
                     middle = rstripped
                     trimmed_something = True

@ -469,13 +471,14 @@ class Urlizer:
                     # entity.
                     recent_semicolon = middle[trail_start:].index(";")
                     middle_semicolon_index = recent_semicolon + trail_start + 1
-                    trail = middle[middle_semicolon_index:] + trail
+                    trail.appendleft(middle[middle_semicolon_index:])
                     middle = rstripped + middle[trail_start:middle_semicolon_index]
                 else:
-                    trail = middle[trail_start:] + trail
+                    trail.appendleft(middle[trail_start:])
                     middle = rstripped
                 trimmed_something = True

+        trail = "".join(trail)
         return lead, middle, trail

     @staticmethod
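The deque-based accumulation is a micro-optimization: fragments are discovered while trimming from the right, so each new fragment belongs to the left of the ones already collected, and appendleft() plus a single join preserves the order without repeated string concatenation. A tiny standalone illustration of the pattern (not Django API):

    from collections import deque

    trail = deque()
    trail.appendleft("...")  # trimmed first (rightmost fragment)
    trail.appendleft(")")    # trimmed next, sits to the left of "..."
    print("".join(trail))    # ")..."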
@ -169,11 +169,11 @@ def int_to_base36(i):
         raise ValueError("Negative base36 conversion input.")
     if i < 36:
         return char_set[i]
-    b36 = ""
+    b36_parts = []
     while i != 0:
         i, n = divmod(i, 36)
-        b36 = char_set[n] + b36
-    return b36
+        b36_parts.append(char_set[n])
+    return "".join(reversed(b36_parts))


 def urlsafe_base64_encode(s):
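Behavior is unchanged; only the string building becomes linear. A few hand-checked values (requires Django installed; this helper backs, among other things, the timestamp portion of password-reset tokens):

    from django.utils.http import int_to_base36

    assert int_to_base36(35) == "z"
    assert int_to_base36(36) == "10"
    assert int_to_base36(1295) == "zz"  # 35 * 36 + 35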
@ -1,10 +1,24 @@
 import functools
 import inspect

+from django.utils.version import PY314
+
+if PY314:
+    import annotationlib
+

 @functools.lru_cache(maxsize=512)
 def _get_func_parameters(func, remove_first):
-    parameters = tuple(inspect.signature(func).parameters.values())
+    # As the annotations are not used in any case, inspect the signature with
+    # FORWARDREF to leave any deferred annotations unevaluated.
+    if PY314:
+        signature = inspect.signature(
+            func, annotation_format=annotationlib.Format.FORWARDREF
+        )
+    else:
+        signature = inspect.signature(func)
+
+    parameters = tuple(signature.parameters.values())
     if remove_first:
         parameters = parameters[1:]
     return parameters
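On Python 3.14, PEP 649/749 deferred annotations may reference names that are never defined in the decorated module; asking inspect.signature() for FORWARDREF keeps such annotations unevaluated instead of forcing their resolution. A small sketch (requires Python 3.14; the annotation name is deliberately undefined):

    import inspect

    import annotationlib  # Python 3.14+


    def view(request, obj: SomeLazyType):  # SomeLazyType is intentionally undefined
        return obj


    # FORWARDREF leaves the deferred annotation unevaluated, so introspection
    # succeeds even though SomeLazyType does not exist.
    sig = inspect.signature(view, annotation_format=annotationlib.Format.FORWARDREF)
    print(list(sig.parameters))  # ['request', 'obj']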
@ -48,6 +48,9 @@ def format(
         if abs(number) < cutoff:
             number = Decimal("0")

+        if not number.is_finite():
+            return str(number)
+
         # Format values with more than 200 digits (an arbitrary cutoff) using
         # scientific notation to avoid high memory usage in {:f}'.format().
         _, digits, exponent = number.as_tuple()

@ -91,15 +94,15 @@ def format(
             # grouping is a single value
             intervals = [grouping, 0]
         active_interval = intervals.pop(0)
-        int_part_gd = ""
+        int_part_gd = []
         cnt = 0
         for digit in int_part[::-1]:
             if cnt and cnt == active_interval:
                 if intervals:
                     active_interval = intervals.pop(0) or active_interval
-                int_part_gd += thousand_sep[::-1]
+                int_part_gd.append(thousand_sep[::-1])
                 cnt = 0
-            int_part_gd += digit
+            int_part_gd.append(digit)
             cnt += 1
-        int_part = int_part_gd[::-1]
+        int_part = "".join(int_part_gd)[::-1]
     return sign + int_part + dec_part
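The grouping loop walks the integer part right-to-left and inserts the (reversed) separator every active_interval digits; collecting the pieces in a list and joining once keeps the work linear. A standalone sketch of the same loop with a fixed grouping of 3 (an illustration, not the Django API):

    def group(int_part, thousand_sep=",", grouping=3):
        parts = []
        cnt = 0
        for digit in int_part[::-1]:
            if cnt and cnt == grouping:
                parts.append(thousand_sep[::-1])
                cnt = 0
            parts.append(digit)
            cnt += 1
        return "".join(parts)[::-1]


    print(group("1234567"))  # 1,234,567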
@ -103,7 +103,7 @@ class TruncateHTMLParser(HTMLParser):
     def __init__(self, *, length, replacement, convert_charrefs=True):
         super().__init__(convert_charrefs=convert_charrefs)
         self.tags = deque()
-        self.output = ""
+        self.output = []
         self.remaining = length
         self.replacement = replacement

@ -119,13 +119,13 @@ class TruncateHTMLParser(HTMLParser):
         self.handle_endtag(tag)

     def handle_starttag(self, tag, attrs):
-        self.output += self.get_starttag_text()
+        self.output.append(self.get_starttag_text())
         if tag not in self.void_elements:
             self.tags.appendleft(tag)

     def handle_endtag(self, tag):
         if tag not in self.void_elements:
-            self.output += f"</{tag}>"
+            self.output.append(f"</{tag}>")
             try:
                 self.tags.remove(tag)
             except ValueError:

@ -136,16 +136,16 @@ class TruncateHTMLParser(HTMLParser):
         data_len = len(data)
         if self.remaining < data_len:
             self.remaining = 0
-            self.output += add_truncation_text(output, self.replacement)
+            self.output.append(add_truncation_text(output, self.replacement))
             raise self.TruncationCompleted
         self.remaining -= data_len
-        self.output += output
+        self.output.append(output)

     def feed(self, data):
         try:
             super().feed(data)
         except self.TruncationCompleted:
-            self.output += "".join([f"</{tag}>" for tag in self.tags])
+            self.output.extend([f"</{tag}>" for tag in self.tags])
             self.tags.clear()
             self.reset()
         else:

@ -166,9 +166,9 @@ class TruncateCharsHTMLParser(TruncateHTMLParser):
     def process(self, data):
         self.processed_chars += len(data)
         if (self.processed_chars == self.length) and (
-            len(self.output) + len(data) == len(self.rawdata)
+            sum(len(p) for p in self.output) + len(data) == len(self.rawdata)
         ):
-            self.output += data
+            self.output.append(data)
             raise self.TruncationCompleted
         output = escape("".join(data[: self.remaining]))
         return data, output

@ -213,7 +213,7 @@ class Truncator(SimpleLazyObject):
             parser = TruncateCharsHTMLParser(length=length, replacement=truncate)
             parser.feed(text)
             parser.close()
-            return parser.output
+            return "".join(parser.output)
         return self._text_chars(length, truncate, text)

     def _text_chars(self, length, truncate, text):

@ -250,7 +250,7 @@ class Truncator(SimpleLazyObject):
             parser = TruncateWordsHTMLParser(length=length, replacement=truncate)
             parser.feed(self._wrapped)
             parser.close()
-            return parser.output
+            return "".join(parser.output)
         return self._text_words(length, truncate)

     def _text_words(self, length, truncate):
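The parsers now accumulate fragments in a list and the public methods join once at the end; the observable output of chars()/words() with html=True is unchanged. A small usage sketch (assumes a configured Django settings module; the exact output is illustrative):

    from django.utils.text import Truncator

    text = "<p>The quick <strong>brown fox</strong> jumped over the lazy dog.</p>"
    print(Truncator(text).words(4, html=True))
    # Truncated after four words, with open tags closed, e.g.:
    # <p>The quick <strong>brown fox…</strong></p>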
 docs/internals/_images/contribution_process.svg (new file, 224 KiB)
     Diff suppressed because one or more lines are too long: a new SVG diagram of
     the contribution process was added.

 docs/internals/_images/triage_process.svg (deleted, 20 KiB)
     The old "Django's ticket triage workflow" diagram was removed. It showed the
     "Open tickets / triage state" boxes (Unreviewed, Accepted, Ready for Checkin),
     the "Closed tickets / resolution" boxes (duplicate, wontfix, invalid,
     needsinfo, worksforme, fixed) with the completed / in progress / stopped
     statuses, and arrows marking which transitions are made by ticket triagers
     and which by mergers.
@ -28,7 +28,7 @@ If an `unreviewed ticket`_ reports a bug, try and reproduce it. If you can
 reproduce it and it seems valid, make a note that you confirmed the bug and
 accept the ticket. Make sure the ticket is filed under the correct component
 area. Consider writing a patch that adds a test for the bug's behavior, even if
-you don't fix the bug itself. See more at :ref:`how-can-i-help-with-triaging`
+you don't fix the bug itself. See more at :ref:`how-can-i-help-with-triaging`.

 Review patches of accepted tickets
 ----------------------------------

@ -49,47 +49,58 @@ attribute easily tells us what and who each ticket is waiting on.
 Since a picture is worth a thousand words, let's start there:

-.. image:: /internals/_images/triage_process.*
-   :height: 750
-   :width: 600
+.. image:: /internals/_images/contribution_process.*
    :alt: Django's ticket triage workflow

-We've got two roles in this diagram:
+We have four roles in this diagram. Maintainers (also known as Fellows) usually
+take part in all of them, but anyone in the Django community can participate in
+any role except merger. The :ref:`merger role <mergers-team>` is granted by a
+vote of the :ref:`Steering Council <steering-council>`.

-* Mergers: people with commit access who are responsible for making the
-  final decision to merge a change.
+* Triagers: anyone can take on this role by checking whether a ticket describes
+  a real issue and keeping the tracker organized.

-* Ticket triagers: anyone in the Django community who chooses to
-  become involved in Django's development process. Our Trac installation
-  is intentionally left open to the public, and anyone can triage tickets.
-  Django is a community project, and we encourage :ref:`triage by the
-  community<how-can-i-help-with-triaging>`.
+* Bug fixers: anyone can contribute by opening a pull request and working on a
+  solution for a ticket.

-By way of example, here we see the lifecycle of an average ticket:
+* Reviewers: anyone can review pull requests and suggest improvements.

-* Alice creates a ticket and sends an incomplete pull request (no tests,
+* Mergers: people with commit access who make the final decision to merge a
+  change.
+
+Our Trac system is intentionally open to the public, and anyone can help by
+working on tickets. Django is a community project, and we encourage
+:ref:`triage and collaboration by the community
+<how-can-i-help-with-triaging>`. This could be you!
+
+For example, here's the typical lifecycle of a ticket:
+
+* Alice creates a ticket and opens an incomplete pull request (missing tests,
   incorrect implementation).

-* Bob reviews the pull request, marks the ticket as "Accepted", "needs tests",
-  and "patch needs improvement", and leaves a comment telling Alice how the
-  patch could be improved.
+* Bob reviews the pull request, marks the ticket as "Accepted", sets the
+  flags "needs tests" and "patch needs improvement", and leaves a comment
+  explaining how Alice can improve the patch. This puts the ticket
+  automatically into the "waiting on author" queue within the "accepted" stage.

-* Alice updates the pull request, adding tests (but not changing the
-  implementation). She removes the two flags.
+* Alice updates the pull request, adding tests (but not yet fixing the
+  implementation), and removes the two flags. The ticket moves into the "needs
+  PR review" queue.

-* Charlie reviews the pull request and resets the "patch needs improvement"
-  flag with another comment about improving the implementation.
+* Charlie reviews the pull request, sets the "patch needs improvement" flag
+  again, and leaves another comment suggesting changes to the implementation.
+  The ticket moves back to the "waiting on author" queue.

-* Alice updates the pull request, fixing the implementation. She removes the
-  "patch needs improvement" flag.
+* Alice updates the pull request again, this time fixing the implementation,
+  and removes the "patch needs improvement" flag. The ticket moves once more
+  into the "needs PR review" queue.

 * Daisy reviews the pull request and marks the ticket as "Ready for checkin".

-* Jacob, a :ref:`merger <mergers-team>`, reviews the pull request and merges
-  it.
+* Jacob, a :ref:`merger <mergers-team>`, reviews and merges the pull request.

-Some tickets require much less feedback than this, but then again some tickets
-require much much more.
+Some tickets move through these steps quickly, while others take more time and
+discussion. Each contribution helps Django improve.

 .. _triage-stages:

@ -104,16 +115,15 @@ Unreviewed

 The ticket has not been reviewed by anyone who felt qualified to make a
 judgment about whether the ticket contained a valid issue or ought to be closed
-for any of the various reasons.
+for any reason. Unreviewed tickets appear in the "triage" queue.

 Accepted
 --------

-The big gray area! The absolute meaning of "accepted" is that the issue
-described in the ticket is valid and is in some stage of being worked on.
-Beyond that there are several considerations:
+The absolute meaning of "accepted" is that the issue described in the ticket is
+valid and actionable. It is broken out into three queues:

-* **Accepted + No Flags**
+* **Needs Patch** (Accepted + No Flags)

   The ticket is valid, but no one has submitted a patch for it yet. Often this
   means you could safely start writing a fix for it. This is generally more

@ -126,14 +136,14 @@ Beyond that there are several considerations:
   <requesting-features>` and received community and :ref:`Steering Council
   <steering-council>` approval, or been accepted in a DEP.

-* **Accepted + Has Patch**
+* **Needs PR Review** (Accepted + Has Patch)

   The ticket is waiting for people to review the supplied solution. This means
   downloading the patch and trying it out, verifying that it contains tests
   and docs, running the test suite with the included patch, and leaving
   feedback on the ticket.

-* **Accepted + Has Patch + Needs ...**
+* **Waiting On Author** (Accepted + Has Patch + Needs fixes)

   This means the ticket has been reviewed, and has been found to need further
   work. "Needs tests" and "Needs documentation" are self-explanatory. "Patch

@ -339,10 +349,10 @@ bring the issue to the `Django Forum`_ instead.

 .. _how-can-i-help-with-triaging:

-How can I help with triaging?
-=============================
+How can I help with development?
+================================

-The triage process is primarily driven by community members. Really,
+The development process is primarily driven by community members. Really,
 **ANYONE** can help.

 To get involved, start by `creating an account on Trac`_. If you have an
@ -196,6 +196,8 @@ Model fields
 * **fields.E133**: ``max_digits`` must be a positive integer.
 * **fields.E134**: ``max_digits`` must be greater or equal to
   ``decimal_places``.
+* **fields.E135**: ``DecimalField``’s ``max_digits`` and ``decimal_places``
+  must both be defined or both omitted.
 * **fields.E140**: ``FilePathField``\s must have either ``allow_files`` or
   ``allow_folders`` set to True.
 * **fields.E150**: ``GenericIPAddressField``\s cannot have ``blank=True`` if
@ -1223,7 +1223,7 @@ Subclassing the built-in database backends
 ==========================================

 Django comes with built-in database backends. You may subclass an existing
-database backends to modify its behavior, features, or configuration.
+database backend to modify its behavior, features, or configuration.

 Consider, for example, that you need to change a single database feature.
 First, you have to create a new directory with a ``base`` module in it. For
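For instance, to tweak one feature flag you can wrap a built-in backend in a tiny package and point ``DATABASES`` at it. A minimal sketch under assumed names (``mysite/postgresql_custom/base.py`` is a hypothetical path, and the PostgreSQL driver must be installed; ``allows_group_by_selected_pks`` and ``features_class`` are real attributes):

    # mysite/postgresql_custom/base.py
    from django.db.backends.postgresql import base, features


    class DatabaseFeatures(features.DatabaseFeatures):
        # Example tweak: behave as if GROUP BY selected PKs is unsupported.
        allows_group_by_selected_pks = False


    class DatabaseWrapper(base.DatabaseWrapper):
        features_class = DatabaseFeatures

With this in place, the settings entry would use ``"ENGINE": "mysite.postgresql_custom"`` alongside the usual connection options.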
@ -257,3 +257,12 @@ The ``Storage`` class
         Returns the URL where the contents of the file referenced by ``name``
         can be accessed. For storage systems that don't support access by URL
         this will raise ``NotImplementedError`` instead.
+
+.. admonition:: There are community-maintained solutions too!
+
+    Django has a vibrant ecosystem. There are storage backends
+    highlighted on the `Community Ecosystem`_ page. The Django Packages
+    `Storage Backends grid`_ has even more options for you!
+
+.. _Community Ecosystem: https://www.djangoproject.com/community/ecosystem/#storage-and-static-files
+.. _Storage Backends grid: https://djangopackages.org/grids/g/storage-backends/
@ -862,16 +862,22 @@ A fixed-precision decimal number, represented in Python by a
 :class:`~decimal.Decimal` instance. It validates the input using
 :class:`~django.core.validators.DecimalValidator`.

-Has the following **required** arguments:
+Has the following arguments:

 .. attribute:: DecimalField.max_digits

     The maximum number of digits allowed in the number. Note that this number
-    must be greater than or equal to ``decimal_places``.
+    must be greater than or equal to ``decimal_places``. It's always required
+    on MySQL because this database doesn't support numeric fields with no
+    precision. It's also required for all database backends when
+    :attr:`~DecimalField.decimal_places` is provided.

 .. attribute:: DecimalField.decimal_places

-    The number of decimal places to store with the number.
+    The number of decimal places to store with the number. It's always required
+    on MySQL because this database doesn't support numeric fields with no
+    precision. It's also required for all database backends when
+    :attr:`~DecimalField.max_digits` is provided.

 For example, to store numbers up to ``999.99`` with a resolution of 2 decimal
 places, you'd use::

@ -895,6 +901,11 @@ when :attr:`~django.forms.Field.localize` is ``False`` or
 should also be aware of :ref:`SQLite limitations <sqlite-decimal-handling>`
 of decimal fields.

+.. versionchanged:: 6.1
+
+    Support for ``DecimalField`` with no precision was added on Oracle,
+    PostgreSQL, and SQLite.
+
 ``DurationField``
 -----------------
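In model terms, the relaxed requirement looks like the following sketch (``Product`` is a hypothetical model declared inside an installed app; the second field is only valid on the backends listed in the versionchanged note, and MySQL still requires both arguments):

    from django.db import models


    class Product(models.Model):
        # Classic form: at most 999.99.
        price = models.DecimalField(max_digits=5, decimal_places=2)
        # Django 6.1+ on Oracle, PostgreSQL, and SQLite: both arguments may be
        # omitted for an unconstrained decimal column.
        weight = models.DecimalField(null=True, blank=True)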
@ -2523,8 +2534,8 @@ Field API reference
     .. method:: get_db_prep_value(value, connection, prepared=False)

         Converts ``value`` to a backend-specific value. By default it returns
-        ``value`` if ``prepared=True`` and :meth:`~Field.get_prep_value` if is
-        ``False``.
+        ``value`` if ``prepared=True``, and :meth:`get_prep_value(value)
+        <Field.get_prep_value>` otherwise.

         See :ref:`converting-query-values-to-database-values` for usage.
@ -312,6 +312,12 @@ not be looking at your Django code. For example::

         ordering = [F("author").asc(nulls_last=True)]

+.. admonition:: Default ordering and GROUP BY
+
+    In :ref:`GROUP BY queries <aggregation-ordering-interaction>` (for example,
+    those using :meth:`~.QuerySet.values` and :meth:`~.QuerySet.annotate`), the
+    default ordering is not applied.
+
 .. warning::

     Ordering is not a free operation. Each field you add to the ordering
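In other words, once a queryset aggregates, the model's ``Meta.ordering`` no longer adds ORDER BY columns to the GROUP BY query; order explicitly when it matters. A short sketch (``Book`` is a hypothetical model whose ``Meta.ordering`` is set, so the snippet assumes a project with such a model):

    from django.db.models import Count

    # Meta.ordering on Book is ignored for this GROUP BY query...
    Book.objects.values("author").annotate(total=Count("id"))

    # ...so request an ordering explicitly if the result order matters.
    Book.objects.values("author").annotate(total=Count("id")).order_by("-total")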
@ -133,8 +133,8 @@ to, or in lieu of custom ``field.clean()`` methods.
     :param code: If not ``None``, overrides :attr:`code`.
     :param allowlist: If not ``None``, overrides :attr:`allowlist`.

-    An :class:`EmailValidator` ensures that a value looks like an email, and
-    raises a :exc:`~django.core.exceptions.ValidationError` with
+    An :class:`EmailValidator` ensures that a value looks like an email
+    address, and raises a :exc:`~django.core.exceptions.ValidationError` with
     :attr:`message` and :attr:`code` if it doesn't. Values longer than 320
     characters are always considered invalid.
@ -7,4 +7,19 @@ Django 4.2.26 release notes
 Django 4.2.26 fixes one security issue with severity "high" and one security
 issue with severity "moderate" in 4.2.25.

-...
+CVE-2025-64458: Potential denial-of-service vulnerability in ``HttpResponseRedirect`` and ``HttpResponsePermanentRedirect`` on Windows
+======================================================================================================================================
+
+Python's :func:`NFKC normalization <python:unicodedata.normalize>` is slow on
+Windows. As a consequence, :class:`~django.http.HttpResponseRedirect`,
+:class:`~django.http.HttpResponsePermanentRedirect`, and the shortcut
+:func:`redirect() <django.shortcuts.redirect>` were subject to a potential
+denial-of-service attack via certain inputs with a very large number of Unicode
+characters (follow up to :cve:`2025-27556`).
+
+CVE-2025-64459: Potential SQL injection via ``_connector`` keyword argument
+===========================================================================
+
+:meth:`.QuerySet.filter`, :meth:`~.QuerySet.exclude`, :meth:`~.QuerySet.get`,
+and :class:`~.Q` were subject to SQL injection using a suitably crafted
+dictionary, with dictionary expansion, as the ``_connector`` argument.
@@ -7,4 +7,19 @@ Django 5.1.14 release notes
 Django 5.1.14 fixes one security issue with severity "high" and one security
 issue with severity "moderate" in 5.1.13.
 
-...
+CVE-2025-64458: Potential denial-of-service vulnerability in ``HttpResponseRedirect`` and ``HttpResponsePermanentRedirect`` on Windows
+======================================================================================================================================
+
+Python's :func:`NFKC normalization <python:unicodedata.normalize>` is slow on
+Windows. As a consequence, :class:`~django.http.HttpResponseRedirect`,
+:class:`~django.http.HttpResponsePermanentRedirect`, and the shortcut
+:func:`redirect() <django.shortcuts.redirect>` were subject to a potential
+denial-of-service attack via certain inputs with a very large number of Unicode
+characters (follow up to :cve:`2025-27556`).
+
+CVE-2025-64459: Potential SQL injection via ``_connector`` keyword argument
+===========================================================================
+
+:meth:`.QuerySet.filter`, :meth:`~.QuerySet.exclude`, :meth:`~.QuerySet.get`,
+and :class:`~.Q` were subject to SQL injection using a suitably crafted
+dictionary, with dictionary expansion, as the ``_connector`` argument.
@@ -8,6 +8,23 @@ Django 5.2.8 fixes one security issue with severity "high", one security issue
 with severity "moderate", and several bugs in 5.2.7. It also adds compatibility
 with Python 3.14.
 
+CVE-2025-64458: Potential denial-of-service vulnerability in ``HttpResponseRedirect`` and ``HttpResponsePermanentRedirect`` on Windows
+======================================================================================================================================
+
+Python's :func:`NFKC normalization <python:unicodedata.normalize>` is slow on
+Windows. As a consequence, :class:`~django.http.HttpResponseRedirect`,
+:class:`~django.http.HttpResponsePermanentRedirect`, and the shortcut
+:func:`redirect() <django.shortcuts.redirect>` were subject to a potential
+denial-of-service attack via certain inputs with a very large number of Unicode
+characters (follow up to :cve:`2025-27556`).
+
+CVE-2025-64459: Potential SQL injection via ``_connector`` keyword argument
+===========================================================================
+
+:meth:`.QuerySet.filter`, :meth:`~.QuerySet.exclude`, :meth:`~.QuerySet.get`,
+and :class:`~.Q` were subject to SQL injection using a suitably crafted
+dictionary, with dictionary expansion, as the ``_connector`` argument.
+
 Bugfixes
 ========
 
@@ -16,3 +33,6 @@ Bugfixes
 * Fixed a bug in Django 5.2 where ``QuerySet.first()`` and ``QuerySet.last()``
   raised an error on querysets performing aggregation that selected all fields
   of a composite primary key.
+
+* Fixed a bug in Django 5.2 where proxy models having a ``CompositePrimaryKey``
+  incorrectly raised a ``models.E042`` system check error.
12
docs/releases/5.2.9.txt
Normal file
@@ -0,0 +1,12 @@
+==========================
+Django 5.2.9 release notes
+==========================
+
+*Expected December 2, 2025*
+
+Django 5.2.9 fixes several bugs in 5.2.8.
+
+Bugfixes
+========
+
+* ...
@@ -92,7 +92,8 @@ Minor features
 :mod:`django.contrib.admin`
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-* ...
+* The admin site login view now redirects authenticated users to the next URL,
+  if available, instead of always redirecting to the admin index page.
 
 :mod:`django.contrib.admindocs`
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -125,7 +126,9 @@ Minor features
 :mod:`django.contrib.postgres`
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-* ...
+* :djadmin:`inspectdb` now introspects
+  :class:`~django.contrib.postgres.fields.HStoreField` when ``psycopg`` 3.2+ is
+  installed and ``django.contrib.postgres`` is in :setting:`INSTALLED_APPS`.
 
 :mod:`django.contrib.redirects`
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -246,6 +249,11 @@ Models
   top-level or nested JSON ``null`` values. See
   :ref:`storing-and-querying-for-none` for usage examples and some caveats.
 
+* :attr:`DecimalField.max_digits <django.db.models.DecimalField.max_digits>`
+  and :attr:`DecimalField.decimal_places
+  <django.db.models.DecimalField.decimal_places>` are no longer required to be
+  set on Oracle, PostgreSQL, and SQLite.
+
 Pagination
 ~~~~~~~~~~
 
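A minimal sketch of what the release note above allows (the ``Measurement`` model is hypothetical and not part of the patch; on other backends ``max_digits`` and ``decimal_places`` remain required)::

    from django.db import models

    class Measurement(models.Model):
        # Unconstrained precision, available on Oracle, PostgreSQL, and SQLite.
        reading = models.DecimalField()

        # Explicit precision still works on every backend.
        price = models.DecimalField(max_digits=7, decimal_places=2)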
@@ -316,6 +324,11 @@ backends.
   database has native support for ``DurationField``, override this method to
   simply return the value.
 
+* The ``DatabaseIntrospection.get_relations()`` should now return a dictionary
+  with 3-tuples containing (``field_name_other_table``, ``other_table``,
+  ``db_on_delete``) as values. ``db_on_delete`` is one of the database-level
+  delete options e.g. :attr:`~django.db.models.DB_CASCADE`.
+
 :mod:`django.contrib.gis`
 -------------------------
 
@@ -39,6 +39,7 @@ versions of the documentation contain the release notes for any later releases.
 .. toctree::
    :maxdepth: 1
 
+   5.2.9
    5.2.8
    5.2.7
    5.2.6
@@ -36,6 +36,30 @@ Issues under Django's security process
 All security issues have been handled under versions of Django's security
 process. These are listed below.
 
+November 5, 2025 - :cve:`2025-64458`
+------------------------------------
+
+Potential denial-of-service vulnerability in ``HttpResponseRedirect`` and
+``HttpResponsePermanentRedirect`` on Windows. `Full description
+<https://www.djangoproject.com/weblog/2025/nov/05/security-releases/>`__
+
+* Django 6.0 :commit:`(patch) <6e13348436fccf8f22982921d6a3a3e65c956a9f>`
+* Django 5.2 :commit:`(patch) <4f5d904b63751dea9ffc3b0e046404a7fa5881ac>`
+* Django 5.1 :commit:`(patch) <3790593781d26168e7306b5b2f8ea0309de16242>`
+* Django 4.2 :commit:`(patch) <770eea38d7a0e9ba9455140b5a9a9e33618226a7>`
+
+November 5, 2025 - :cve:`2025-64459`
+------------------------------------
+
+Potential SQL injection via ``_connector`` keyword argument in ``QuerySet`` and
+``Q`` objects. `Full description
+<https://www.djangoproject.com/weblog/2025/nov/05/security-releases/>`__
+
+* Django 6.0 :commit:`(patch) <06dd38324ac3d60d83d9f3adabf0dcdf423d2a85>`
+* Django 5.2 :commit:`(patch) <6703f364d767e949c5b0e4016433ef75063b4f9b>`
+* Django 5.1 :commit:`(patch) <72d2c87431f2ae0431d65d0ec792047f078c8241>`
+* Django 4.2 :commit:`(patch) <59ae82e67053d281ff4562a24bbba21299f0a7d4>`
+
 October 1, 2025 - :cve:`2025-59681`
 -----------------------------------
 
@@ -627,8 +627,15 @@ fields you also select in a ``values()`` call.
 You might reasonably ask why Django doesn't remove the extraneous columns
 for you. The main reason is consistency with ``distinct()`` and other
 places: Django **never** removes ordering constraints that you have
-specified (and we can't change those other methods' behavior, as that
-would violate our :doc:`/misc/api-stability` policy).
+specified *explicitly with* ``order_by()`` (and we can't change those
+other methods' behavior, as that would violate our
+:doc:`/misc/api-stability` policy).
+
+.. admonition:: Default ordering not applied to GROUP BY
+
+    ``GROUP BY`` queries (for example, those using ``.values()`` and
+    ``.annotate()``) don't use the model's default ordering.
+    Use ``order_by()`` explicitly when a given order is needed.
 
 Aggregating annotations
 -----------------------
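To make the reworded caveat above concrete, here is a small sketch (``Book``, ``publisher``, and ``date_published`` are hypothetical names, not taken from the patch): a column requested with ``order_by()`` joins the ``GROUP BY``, while an empty ``order_by()`` clears any ordering you no longer want::

    from django.db.models import Count

    # "date_published" becomes part of the GROUP BY because it was requested
    # explicitly with order_by(); Django never strips it for you.
    per_publisher = (
        Book.objects.values("publisher")
        .annotate(num_books=Count("id"))
        .order_by("date_published")
    )

    # Clearing the ordering keeps the grouping to the values() columns only.
    per_publisher = (
        Book.objects.values("publisher")
        .annotate(num_books=Count("id"))
        .order_by()
    )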
@@ -773,6 +773,16 @@ specify this backend, put the following in your settings::
 This backend is not intended for use in production -- it is provided as a
 convenience that can be used during development.
 
+.. admonition:: There are community-maintained solutions too!
+
+    Django has a vibrant ecosystem. There are email backends
+    highlighted on the `Community Ecosystem`_ page. The Django Packages
+    `Email grid`_ has even more options for you!
+
+    .. _Community Ecosystem: https://www.djangoproject.com/community/ecosystem/#email-and-notifications
+    .. _Email grid: https://djangopackages.org/grids/g/email/
+
 .. _topic-custom-email-backend:
 
 Defining a custom email backend
@@ -426,6 +426,12 @@ Django is compatible with versions of PyPy corresponding to the supported
 Python versions, but you will need to check the compatibility of other
 libraries you rely on.
 
+That said, a lot of a web framework's work is done by concatenating
+strings, and PyPy has an issue with that (see
+`this PyPy blog
+<https://pypy.org/posts/2023/01/string-concatenation-quadratic.html>`_).
+This may cause performance issues, depending on your use.
+
 C implementations of Python libraries
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
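As a rough illustration of the concatenation issue mentioned above (``rows`` and ``render_row`` are hypothetical placeholders), repeated ``+=`` on strings can degrade towards quadratic time on PyPy, while ``str.join()`` stays linear::

    # Potentially quadratic on PyPy: each += may copy everything built so far.
    html = ""
    for row in rows:
        html += render_row(row)

    # Linear alternative that behaves well on CPython and PyPy alike.
    html = "".join(render_row(row) for row in rows)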
@@ -323,11 +323,15 @@ in order to reduce the number of sync/async calling-style switches within a
 they are async before being called. This means that an asynchronous receiver
 registered before a synchronous receiver may be executed after the synchronous
 receiver. In addition, async receivers are executed concurrently using
-``asyncio.gather()``.
+:class:`asyncio.TaskGroup`.
 
 All built-in signals, except those in the async request-response cycle, are
 dispatched using :meth:`Signal.send`.
 
+.. versionchanged:: 6.1
+
+    In older versions, async receivers were executed via ``asyncio.gather()``.
+
 Disconnecting signals
 =====================
 
|
||||||
$('<div class="helptext">This is helpful.</div>').appendTo('#test');
|
$('<div class="helptext">This is helpful.</div>').appendTo('#test');
|
||||||
$('<select id="id"><option value="0">A</option></select>').appendTo('#test');
|
$('<select id="id"><option value="0">A</option></select>').appendTo('#test');
|
||||||
SelectFilter.init('id', 'things', 0);
|
SelectFilter.init('id', 'things', 0);
|
||||||
assert.equal($('#test').children().first().prop("tagName"), "DIV");
|
assert.deepEqual(
|
||||||
assert.equal($('#test').children().first().attr("class"), "selector");
|
Array.from($('#test')[0].children).map(child => child.tagName),
|
||||||
|
["LABEL", "DIV", "DIV"]
|
||||||
|
);
|
||||||
|
assert.equal($('.helptext')[0].nextSibling.getAttribute("class"), "selector");
|
||||||
assert.equal($('.selector-available label').text().trim(), "Available things");
|
assert.equal($('.selector-available label').text().trim(), "Available things");
|
||||||
assert.equal($('.selector-available label').attr("id"), "id_from_label");
|
assert.equal($('.selector-available label').attr("id"), "id_from_label");
|
||||||
assert.equal($('.selector-chosen label').text().trim(), "Chosen things");
|
assert.equal($('.selector-chosen label').text().trim(), "Chosen things");
|
||||||
|
|
|
||||||
|
|
@@ -33,7 +33,6 @@ from django.core.management.base import LabelCommand, SystemCheckError
 from django.core.management.commands.loaddata import Command as LoaddataCommand
 from django.core.management.commands.runserver import Command as RunserverCommand
 from django.core.management.commands.testserver import Command as TestserverCommand
-from django.core.management.utils import find_formatters
 from django.db import ConnectionHandler, connection
 from django.db.migrations.recorder import MigrationRecorder
 from django.test import LiveServerTestCase, SimpleTestCase, TestCase, override_settings
@@ -49,6 +48,8 @@ custom_templates_dir = os.path.join(os.path.dirname(__file__), "custom_templates
 
 SYSTEM_CHECK_MSG = "System check identified no issues"
 
+HAS_BLACK = shutil.which("black")
+
 
 class AdminScriptTestCase(SimpleTestCase):
     def setUp(self):
@@ -112,20 +113,7 @@ class AdminScriptTestCase(SimpleTestCase):
             paths.append(os.path.dirname(backend_dir))
         return paths
 
-    @cached_property
-    def path_without_formatters(self):
-        return os.pathsep.join(
-            [
-                path_component
-                for path_component in os.environ.get("PATH", "").split(os.pathsep)
-                for formatter_path in find_formatters().values()
-                if os.path.commonpath([path_component, formatter_path]) == os.sep
-            ]
-        )
-
-    def run_test(
-        self, args, settings_file=None, apps=None, umask=-1, discover_formatters=False
-    ):
+    def run_test(self, args, settings_file=None, apps=None, umask=-1):
         base_dir = os.path.dirname(self.test_dir)
         # The base dir for Django's tests is one level up.
         tests_dir = os.path.dirname(os.path.dirname(__file__))
@@ -147,8 +135,6 @@ class AdminScriptTestCase(SimpleTestCase):
         python_path.extend(ext_backend_base_dirs)
         test_environ["PYTHONPATH"] = os.pathsep.join(python_path)
         test_environ["PYTHONWARNINGS"] = ""
-        if not discover_formatters:
-            test_environ["PATH"] = self.path_without_formatters
 
         p = subprocess.run(
             [sys.executable, *args],
@@ -160,19 +146,10 @@ class AdminScriptTestCase(SimpleTestCase):
         )
         return p.stdout, p.stderr
 
-    def run_django_admin(
-        self, args, settings_file=None, umask=-1, discover_formatters=False
-    ):
-        return self.run_test(
-            ["-m", "django", *args],
-            settings_file,
-            umask=umask,
-            discover_formatters=discover_formatters,
-        )
+    def run_django_admin(self, args, settings_file=None, umask=-1):
+        return self.run_test(["-m", "django", *args], settings_file, umask=umask)
 
-    def run_manage(
-        self, args, settings_file=None, manage_py=None, discover_formatters=False
-    ):
+    def run_manage(self, args, settings_file=None, manage_py=None):
         template_manage_py = (
             os.path.join(os.path.dirname(__file__), manage_py)
             if manage_py
@@ -191,11 +168,17 @@ class AdminScriptTestCase(SimpleTestCase):
         with open(test_manage_py, "w") as fp:
             fp.write(manage_py_contents)
 
-        return self.run_test(
-            ["./manage.py", *args],
-            settings_file,
-            discover_formatters=discover_formatters,
-        )
+        return self.run_test(["./manage.py", *args], settings_file)
+
+    def assertInAfterFormatting(self, member, container, msg=None):
+        if HAS_BLACK:
+            import black
+
+            # Black does not have a stable API, but this is still less fragile
+            # than attempting to filter out all paths where it is available.
+            member = black.format_str(member, mode=black.FileMode())
+
+        self.assertIn(member, container, msg=msg)
 
     def assertNoOutput(self, stream):
         "Utility assertion: assert that the given stream is empty"
@@ -772,7 +755,7 @@ class DjangoAdminSettingsDirectory(AdminScriptTestCase):
         with open(os.path.join(app_path, "apps.py")) as f:
             content = f.read()
         self.assertIn("class SettingsTestConfig(AppConfig)", content)
-        self.assertIn("name = 'settings_test'", content)
+        self.assertInAfterFormatting("name = 'settings_test'", content)
 
     def test_setup_environ_custom_template(self):
         """
@@ -797,7 +780,7 @@ class DjangoAdminSettingsDirectory(AdminScriptTestCase):
         with open(os.path.join(app_path, "apps.py"), encoding="utf8") as f:
             content = f.read()
         self.assertIn("class こんにちはConfig(AppConfig)", content)
-        self.assertIn("name = 'こんにちは'", content)
+        self.assertInAfterFormatting("name = 'こんにちは'", content)
 
     def test_builtin_command(self):
         """
@@ -1959,7 +1942,7 @@ class CommandTypes(AdminScriptTestCase):
     def test_version(self):
         "version is handled as a special case"
         args = ["version"]
-        out, err = self.run_manage(args, discover_formatters=True)
+        out, err = self.run_manage(args)
         self.assertNoOutput(err)
         self.assertOutput(out, get_version())
 
@@ -2719,7 +2702,7 @@ class StartProject(LiveServerTestCase, AdminScriptTestCase):
         args = ["startproject", "--template", template_path, "customtestproject"]
         testproject_dir = os.path.join(self.test_dir, "customtestproject")
 
-        _, err = self.run_django_admin(args, discover_formatters=True)
+        _, err = self.run_django_admin(args)
         self.assertNoOutput(err)
         with open(
             os.path.join(template_path, "additional_dir", "requirements.in")
@@ -2814,7 +2797,7 @@ class StartProject(LiveServerTestCase, AdminScriptTestCase):
             f"{self.live_server_url}/user_agent_check/project_template.tgz"
         )
         args = ["startproject", "--template", template_url, "urltestproject"]
-        _, err = self.run_django_admin(args, discover_formatters=True)
+        _, err = self.run_django_admin(args)
 
         self.assertNoOutput(err)
         self.assertIn("Django/%s" % get_version(), user_agent)
@@ -2885,8 +2868,10 @@ class StartProject(LiveServerTestCase, AdminScriptTestCase):
         test_manage_py = os.path.join(testproject_dir, "manage.py")
         with open(test_manage_py) as fp:
             content = fp.read()
-            self.assertIn('project_name = "another_project"', content)
-            self.assertIn('project_directory = "%s"' % testproject_dir, content)
+            self.assertInAfterFormatting('project_name = "another_project"', content)
+            self.assertInAfterFormatting(
+                'project_directory = "%s"' % testproject_dir, content
+            )
 
     def test_no_escaping_of_project_variables(self):
         "Make sure template context variables are not html escaped"
@@ -2996,7 +2981,7 @@ class StartProject(LiveServerTestCase, AdminScriptTestCase):
         self.assertNoOutput(err)
         render_py_path = os.path.join(testproject_dir, ".hidden", "render.py")
         with open(render_py_path) as fp:
-            self.assertIn(
+            self.assertInAfterFormatting(
                 f"# The {project_name} should be rendered.",
                 fp.read(),
             )
@@ -3156,7 +3141,7 @@ class StartApp(AdminScriptTestCase):
         with open(os.path.join(app_path, "apps.py")) as f:
             content = f.read()
         self.assertIn("class NewAppConfig(AppConfig)", content)
-        self.assertIn("name = 'new_app'", content)
+        self.assertInAfterFormatting("name = 'new_app'", content)
 
     def test_creates_directory_when_custom_app_destination_missing(self):
         args = [
@@ -2413,6 +2413,32 @@ class AdminViewPermissionsTest(TestCase):
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse("admin:index"))
 
+    def test_login_redirect_when_logged_in(self):
+        self.client.force_login(self.superuser)
+        response = self.client.get(reverse("admin:login"))
+        self.assertRedirects(response, reverse("admin:index"))
+
+    def test_login_redirect_to_next_url_when_logged_in(self):
+        self.client.force_login(self.superuser)
+        next_url = reverse("admin:admin_views_article_add")
+        response = self.client.get(
+            reverse("admin:login"),
+            query_params={REDIRECT_FIELD_NAME: next_url},
+        )
+        self.assertRedirects(response, next_url)
+
+    def test_login_redirect_unsafe_next_url_when_logged_in(self):
+        self.client.force_login(self.superuser)
+        response = self.client.get(
+            reverse("admin:login"),
+            query_params={
+                REDIRECT_FIELD_NAME: "https://example.com/bad",
+            },
+        )
+        self.assertRedirects(
+            response, reverse("admin:index"), fetch_redirect_response=False
+        )
+
     def test_login_has_permission(self):
         # Regular User should not be able to login.
         response = self.client.get(reverse("has_permission_admin:index"))
@@ -206,6 +206,45 @@ class UserModelChecksTests(SimpleTestCase):
             ],
         )
 
+    @override_settings(AUTH_USER_MODEL="auth_tests.VulnerableStaticUser")
+    def test_is_anonymous_authenticated_static_methods(self):
+        """
+        <User Model>.is_anonymous/is_authenticated must not be static methods.
+        """
+
+        class VulnerableStaticUser(AbstractBaseUser):
+            username = models.CharField(max_length=30, unique=True)
+            USERNAME_FIELD = "username"
+
+            @staticmethod
+            def is_anonymous():
+                return False
+
+            @staticmethod
+            def is_authenticated():
+                return False
+
+        errors = checks.run_checks(app_configs=self.apps.get_app_configs())
+        self.assertEqual(
+            errors,
+            [
+                checks.Critical(
+                    "%s.is_anonymous must be an attribute or property rather than "
+                    "a method. Ignoring this is a security issue as anonymous "
+                    "users will be treated as authenticated!" % VulnerableStaticUser,
+                    obj=VulnerableStaticUser,
+                    id="auth.C009",
+                ),
+                checks.Critical(
+                    "%s.is_authenticated must be an attribute or property rather "
+                    "than a method. Ignoring this is a security issue as anonymous "
+                    "users will be treated as authenticated!" % VulnerableStaticUser,
+                    obj=VulnerableStaticUser,
+                    id="auth.C010",
+                ),
+            ],
+        )
+
 
 @isolate_apps("auth_tests", attr_name="apps")
 @override_system_checks([check_models_permissions])
@@ -517,8 +517,11 @@ class Tests(TestCase):
     def test_correct_extraction_psycopg_version(self):
         from django.db.backends.postgresql.base import Database, psycopg_version
 
+        psycopg_version.cache_clear()
         with mock.patch.object(Database, "__version__", "4.2.1 (dt dec pq3 ext lo64)"):
+            self.addCleanup(psycopg_version.cache_clear)
             self.assertEqual(psycopg_version(), (4, 2, 1))
+        psycopg_version.cache_clear()
         with mock.patch.object(
             Database, "__version__", "4.2b0.dev1 (dt dec pq3 ext lo64)"
         ):
@@ -1,5 +1,6 @@
 import os
 import re
+import sqlite3
 import tempfile
 import threading
 import unittest
@@ -215,15 +216,28 @@ class LastExecutedQueryTest(TestCase):
         substituted = "SELECT '\"''\\'"
         self.assertEqual(connection.queries[-1]["sql"], substituted)
 
-    def test_large_number_of_parameters(self):
-        # If SQLITE_MAX_VARIABLE_NUMBER (default = 999) has been changed to be
-        # greater than SQLITE_MAX_COLUMN (default = 2000), last_executed_query
-        # can hit the SQLITE_MAX_COLUMN limit (#26063).
-        with connection.cursor() as cursor:
-            sql = "SELECT MAX(%s)" % ", ".join(["%s"] * 2001)
-            params = list(range(2001))
-            # This should not raise an exception.
-            cursor.db.ops.last_executed_query(cursor.cursor, sql, params)
+    def test_parameter_count_exceeds_variable_or_column_limit(self):
+        sql = "SELECT MAX(%s)" % ", ".join(["%s"] * 1001)
+        params = list(range(1001))
+        for label, limit, current_limit in [
+            (
+                "variable",
+                sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER,
+                connection.features.max_query_params,
+            ),
+            (
+                "column",
+                sqlite3.SQLITE_LIMIT_COLUMN,
+                connection.connection.getlimit(sqlite3.SQLITE_LIMIT_COLUMN),
+            ),
+        ]:
+            with self.subTest(limit=label):
+                connection.connection.setlimit(limit, 1000)
+                self.addCleanup(connection.connection.setlimit, limit, current_limit)
+                with connection.cursor() as cursor:
+                    # This should not raise an exception.
+                    cursor.db.ops.last_executed_query(cursor.cursor, sql, params)
+                connection.connection.setlimit(limit, current_limit)
 
 
 @unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
@@ -884,6 +884,15 @@ class BulkCreateTests(TestCase):
         (obj,) = DbDefaultPrimaryKey.objects.bulk_create([DbDefaultPrimaryKey()])
         self.assertIsInstance(obj.id, datetime)
 
+    @skipUnlessDBFeature(
+        "can_return_rows_from_bulk_insert", "supports_expression_defaults"
+    )
+    def test_db_expression_primary_key(self):
+        (obj,) = DbDefaultPrimaryKey.objects.bulk_create(
+            [DbDefaultPrimaryKey(id=Now())]
+        )
+        self.assertIsInstance(obj.id, datetime)
+
 
 @skipUnlessDBFeature("supports_transactions", "has_bulk_insert")
 class BulkCreateTransactionTests(TransactionTestCase):
@@ -1,11 +1,11 @@
 import multiprocessing
 import sys
 from io import StringIO
-from unittest import skipIf
+from unittest import mock, skipIf
 
 from django.apps import apps
 from django.core import checks
-from django.core.checks import Error, Warning
+from django.core.checks import Error, Tags, Warning
 from django.core.checks.messages import CheckMessage
 from django.core.checks.registry import CheckRegistry
 from django.core.management import call_command
@@ -92,6 +92,21 @@ class SystemCheckFrameworkTests(SimpleTestCase):
         with self.assertRaisesMessage(TypeError, msg):
             registry.run_checks()
 
+    def test_run_checks_database_exclusion(self):
+        registry = CheckRegistry()
+
+        database_errors = [checks.Warning("Database Check")]
+
+        @registry.register(Tags.database)
+        def database_system_check(**kwargs):
+            return database_errors
+
+        errors = registry.run_checks()
+        self.assertEqual(errors, [])
+
+        errors = registry.run_checks(databases=["default"])
+        self.assertEqual(errors, database_errors)
+
 
 class MessageTests(SimpleTestCase):
     def test_printing(self):
@@ -190,10 +205,12 @@ class CheckCommandTests(SimpleTestCase):
     def test_simple_call(self):
         call_command("check")
         self.assertEqual(
-            simple_system_check.kwargs, {"app_configs": None, "databases": None}
+            simple_system_check.kwargs,
+            {"app_configs": None, "databases": ["default", "other"]},
         )
         self.assertEqual(
-            tagged_system_check.kwargs, {"app_configs": None, "databases": None}
+            tagged_system_check.kwargs,
+            {"app_configs": None, "databases": ["default", "other"]},
         )
 
     @override_system_checks([simple_system_check, tagged_system_check])
@@ -203,11 +220,17 @@ class CheckCommandTests(SimpleTestCase):
         admin_config = apps.get_app_config("admin")
         self.assertEqual(
             simple_system_check.kwargs,
-            {"app_configs": [auth_config, admin_config], "databases": None},
+            {
+                "app_configs": [auth_config, admin_config],
+                "databases": ["default", "other"],
+            },
         )
         self.assertEqual(
             tagged_system_check.kwargs,
-            {"app_configs": [auth_config, admin_config], "databases": None},
+            {
+                "app_configs": [auth_config, admin_config],
+                "databases": ["default", "other"],
+            },
         )
 
     @override_system_checks([simple_system_check, tagged_system_check])
@@ -215,7 +238,8 @@ class CheckCommandTests(SimpleTestCase):
         call_command("check", tags=["simpletag"])
         self.assertIsNone(simple_system_check.kwargs)
         self.assertEqual(
-            tagged_system_check.kwargs, {"app_configs": None, "databases": None}
+            tagged_system_check.kwargs,
+            {"app_configs": None, "databases": ["default", "other"]},
         )
 
     @override_system_checks([simple_system_check, tagged_system_check])
@@ -268,6 +292,17 @@ class CheckCommandTests(SimpleTestCase):
         with self.assertRaises(CommandError):
             call_command("check", fail_level="WARNING")
 
+    def test_database_system_checks(self):
+        database_check = mock.Mock(return_value=[], tags=[Tags.database])
+
+        with override_system_checks([database_check]):
+            call_command("check")
+            database_check.assert_not_called()
+            call_command("check", databases=["default"])
+            database_check.assert_called_once_with(
+                app_configs=None, databases=["default"]
+            )
+
 
 def custom_error_system_check(app_configs, **kwargs):
     return [Error("Error", id="myerrorcheck.E001")]
@@ -268,3 +268,38 @@ class CompositePKChecksTests(TestCase):
                 ),
             ],
         )
+
+    def test_proxy_model_can_subclass_model_with_composite_pk(self):
+        class Foo(models.Model):
+            pk = models.CompositePrimaryKey("a", "b")
+            a = models.SmallIntegerField()
+            b = models.SmallIntegerField()
+
+        class Bar(Foo):
+            class Meta:
+                proxy = True
+
+        self.assertEqual(Foo.check(databases=self.databases), [])
+        self.assertEqual(Bar.check(databases=self.databases), [])
+
+    def test_proxy_model_does_not_check_superclass_composite_pk_errors(self):
+        class Foo(models.Model):
+            pk = models.CompositePrimaryKey("a", "b")
+            a = models.SmallIntegerField()
+
+        class Bar(Foo):
+            class Meta:
+                proxy = True
+
+        self.assertEqual(
+            Foo.check(databases=self.databases),
+            [
+                checks.Error(
+                    "'b' cannot be included in the composite primary key.",
+                    hint="'b' is not a valid field.",
+                    obj=Foo,
+                    id="models.E042",
+                ),
+            ],
+        )
+        self.assertEqual(Bar.check(databases=self.databases), [])
@@ -3913,7 +3913,7 @@ aria-describedby="id_age_error"></td></tr>""",
         )
         self.assertHTMLEqual(
             f["field"].legend_tag(),
-            '<legend for="id_field" class="required">Field:</legend>',
+            '<legend class="required">Field:</legend>',
         )
         self.assertHTMLEqual(
             f["field"].label_tag(attrs={"class": "foo"}),
@@ -3921,14 +3921,14 @@ aria-describedby="id_age_error"></td></tr>""",
         )
         self.assertHTMLEqual(
             f["field"].legend_tag(attrs={"class": "foo"}),
-            '<legend for="id_field" class="foo required">Field:</legend>',
+            '<legend class="foo required">Field:</legend>',
         )
         self.assertHTMLEqual(
             f["field2"].label_tag(), '<label for="id_field2">Field2:</label>'
         )
         self.assertHTMLEqual(
             f["field2"].legend_tag(),
-            '<legend for="id_field2">Field2:</legend>',
+            "<legend>Field2:</legend>",
         )
 
     def test_label_split_datetime_not_displayed(self):
@@ -4190,31 +4190,47 @@ aria-describedby="id_age_error"></td></tr>""",
 
         boundfield = SomeForm()["field"]
 
-        testcases = [  # (args, kwargs, expected)
-            # without anything: just print the <label>
-            ((), {}, '<%(tag)s for="id_field">Field:</%(tag)s>'),
+        testcases = [  # (args, kwargs, expected_label, expected_legend)
+            # without anything: just print the <label>/<legend>
+            ((), {}, '<label for="id_field">Field:</label>', "<legend>Field:</legend>"),
             # passing just one argument: overrides the field's label
-            (("custom",), {}, '<%(tag)s for="id_field">custom:</%(tag)s>'),
+            (
+                ("custom",),
+                {},
+                '<label for="id_field">custom:</label>',
+                "<legend>custom:</legend>",
+            ),
             # the overridden label is escaped
-            (("custom&",), {}, '<%(tag)s for="id_field">custom&amp;:</%(tag)s>'),
-            ((mark_safe("custom&"),), {}, '<%(tag)s for="id_field">custom&:</%(tag)s>'),
-            # Passing attrs to add extra attributes on the <label>
+            (
+                ("custom&",),
+                {},
+                '<label for="id_field">custom&amp;:</label>',
+                "<legend>custom&amp;:</legend>",
+            ),
+            (
+                (mark_safe("custom&"),),
+                {},
+                '<label for="id_field">custom&:</label>',
+                "<legend>custom&:</legend>",
+            ),
+            # Passing attrs to add extra attributes on the <label>/<legend>
             (
                 (),
                 {"attrs": {"class": "pretty"}},
-                '<%(tag)s for="id_field" class="pretty">Field:</%(tag)s>',
+                '<label for="id_field" class="pretty">Field:</label>',
+                '<legend class="pretty">Field:</legend>',
             ),
         ]
 
-        for args, kwargs, expected in testcases:
+        for args, kwargs, expected_label, expected_legend in testcases:
             with self.subTest(args=args, kwargs=kwargs):
                 self.assertHTMLEqual(
                     boundfield.label_tag(*args, **kwargs),
-                    expected % {"tag": "label"},
+                    expected_label,
                 )
                 self.assertHTMLEqual(
                     boundfield.legend_tag(*args, **kwargs),
-                    expected % {"tag": "legend"},
+                    expected_legend,
                 )
 
     def test_boundfield_label_tag_no_id(self):
@@ -4252,7 +4268,7 @@ aria-describedby="id_age_error"></td></tr>""",
         )
         self.assertHTMLEqual(
             form["custom"].legend_tag(),
-            '<legend for="custom_id_custom">Custom:</legend>',
+            "<legend>Custom:</legend>",
         )
         self.assertHTMLEqual(form["empty"].label_tag(), "<label>Empty:</label>")
         self.assertHTMLEqual(form["empty"].legend_tag(), "<legend>Empty:</legend>")
@@ -4266,7 +4282,7 @@ aria-describedby="id_age_error"></td></tr>""",
         self.assertHTMLEqual(boundfield.label_tag(), '<label for="id_field"></label>')
         self.assertHTMLEqual(
             boundfield.legend_tag(),
-            '<legend for="id_field"></legend>',
+            "<legend></legend>",
         )
 
     def test_boundfield_id_for_label(self):
@@ -4339,7 +4355,7 @@ aria-describedby="id_age_error"></td></tr>""",
         )
         self.assertHTMLEqual(
             boundfield.legend_tag(label_suffix="$"),
-            '<legend for="id_field">Field$</legend>',
+            "<legend>Field$</legend>",
         )
 
     def test_error_dict(self):
@@ -4879,7 +4895,7 @@ aria-describedby="id_age_error"></td></tr>""",
         )
         self.assertEqual(
             field.legend_tag(),
-            '<legend for="id_first_name">First name:</legend>',
+            "<legend>First name:</legend>",
         )
 
     @override_settings(USE_THOUSAND_SEPARATOR=True)
@@ -4892,7 +4908,7 @@ aria-describedby="id_age_error"></td></tr>""",
         )
         self.assertHTMLEqual(
             field.legend_tag(attrs={"number": 9999}),
-            '<legend number="9999" for="id_first_name">First name:</legend>',
+            '<legend number="9999">First name:</legend>',
         )
 
     def test_remove_cached_field(self):
@@ -5204,12 +5220,12 @@ class TemplateTests(SimpleTestCase):
         self.assertHTMLEqual(
             t.render(Context({"form": f})),
             "<form>"
-            '<p><legend for="id_username">Username:</legend>'
+            "<p><legend>Username:</legend>"
            '<input id="id_username" type="text" name="username" maxlength="10" '
             'aria-describedby="id_username_helptext" required></p>'
-            '<p><legend for="id_password1">Password1:</legend>'
+            "<p><legend>Password1:</legend>"
             '<input type="password" name="password1" id="id_password1" required></p>'
-            '<p><legend for="id_password2">Password2:</legend>'
+            "<p><legend>Password2:</legend>"
             '<input type="password" name="password2" id="id_password2" required></p>'
             '<input type="submit" required>'
             "</form>",
@@ -59,14 +59,14 @@ class FormsI18nTests(SimpleTestCase):
         )
         self.assertHTMLEqual(
             f["field_1"].legend_tag(),
-            '<legend for="id_field_1">field_1:</legend>',
+            "<legend>field_1:</legend>",
         )
         self.assertHTMLEqual(
             f["field_2"].label_tag(), '<label for="field_2_id">field_2:</label>'
         )
         self.assertHTMLEqual(
             f["field_2"].legend_tag(),
-            '<legend for="field_2_id">field_2:</legend>',
+            "<legend>field_2:</legend>",
         )
 
     def test_non_ascii_choices(self):
@@ -248,12 +248,12 @@ class ClearableFileInputTest(WidgetTest):
         form = TestForm()
         self.assertIs(self.widget.use_fieldset, True)
         self.assertHTMLEqual(
-            '<div><fieldset><legend for="id_field">Field:</legend>'
+            "<div><fieldset><legend>Field:</legend>"
             '<input id="id_field" name="field" type="file" required></fieldset></div>'
-            '<div><fieldset><legend for="id_with_file">With file:</legend>Currently: '
+            "<div><fieldset><legend>With file:</legend>Currently: "
             '<a href="something">something</a><br>Change:<input type="file" '
             'name="with_file" id="id_with_file"></fieldset></div>'
-            '<div><fieldset><legend for="id_clearable_file">Clearable file:</legend>'
+            "<div><fieldset><legend>Clearable file:</legend>"
             'Currently: <a href="something">something</a><input '
             'type="checkbox" name="clearable_file-clear" id="clearable_file-clear_id">'
             '<label for="clearable_file-clear_id">Clear</label><br>Change:'
@@ -718,7 +718,7 @@ class SelectDateWidgetTest(WidgetTest):
         form = TestForm()
         self.assertIs(self.widget.use_fieldset, True)
         self.assertHTMLEqual(
-            '<div><fieldset><legend for="id_field_month">Field:</legend>'
+            "<div><fieldset><legend>Field:</legend>"
             '<select name="field_month" required id="id_field_month">'
             '<option value="1">January</option><option value="2">February</option>'
             '<option value="3">March</option><option value="4">April</option>'
@@ -24,6 +24,7 @@ from django.http import (
 )
 from django.test import SimpleTestCase
 from django.utils.functional import lazystr
+from django.utils.http import MAX_URL_LENGTH
 
 
 class QueryDictTests(SimpleTestCase):
@@ -490,6 +491,7 @@ class HttpResponseTests(SimpleTestCase):
             'data:text/html,<script>window.alert("xss")</script>',
             "mailto:test@example.com",
             "file:///etc/passwd",
+            "é" * (MAX_URL_LENGTH + 1),
         ]
         for url in bad_urls:
             with self.assertRaises(DisallowedRedirect):
@@ -153,6 +153,9 @@ class HumanizeTests(SimpleTestCase):
             "-1234567.1234567",
             Decimal("1234567.1234567"),
             Decimal("-1234567.1234567"),
+            Decimal("Infinity"),
+            Decimal("-Infinity"),
+            Decimal("NaN"),
             None,
             "1234567",
             "-1234567",
@@ -193,6 +196,9 @@ class HumanizeTests(SimpleTestCase):
             "-1,234,567.1234567",
             "1,234,567.1234567",
             "-1,234,567.1234567",
+            "Infinity",
+            "-Infinity",
+            "NaN",
             None,
             "1,234,567",
             "-1,234,567",
@@ -121,6 +121,15 @@ class CharFieldUnlimited(models.Model):
         required_db_features = {"supports_unlimited_charfield"}
 
 
+class DecimalFieldNoPrec(models.Model):
+    decimal_field_no_precision = models.DecimalField(
+        max_digits=None, decimal_places=None
+    )
+
+    class Meta:
+        required_db_features = {"supports_no_precision_decimalfield"}
+
+
 class UniqueTogether(models.Model):
     field1 = models.IntegerField()
     field2 = models.CharField(max_length=10)
@@ -161,3 +170,11 @@ class CompositePKModel(models.Model):
     pk = models.CompositePrimaryKey("column_1", "column_2")
     column_1 = models.IntegerField()
     column_2 = models.IntegerField()
+
+
+class DbOnDeleteModel(models.Model):
+    fk_do_nothing = models.ForeignKey(UniqueTogether, on_delete=models.DO_NOTHING)
+    fk_db_cascade = models.ForeignKey(ColumnTypes, on_delete=models.DB_CASCADE)
+    fk_set_null = models.ForeignKey(
+        DigitsInColumnName, on_delete=models.DB_SET_NULL, null=True
+    )
@ -202,6 +202,13 @@ class InspectDBTestCase(TestCase):
|
||||||
output = out.getvalue()
|
output = out.getvalue()
|
||||||
self.assertIn("char_field = models.CharField()", output)
|
self.assertIn("char_field = models.CharField()", output)
|
||||||
|
|
||||||
|
@skipUnlessDBFeature("supports_no_precision_decimalfield")
|
||||||
|
def test_decimal_field_no_precision(self):
|
||||||
|
out = StringIO()
|
||||||
|
call_command("inspectdb", "inspectdb_decimalfieldnoprec", stdout=out)
|
||||||
|
output = out.getvalue()
|
||||||
|
self.assertIn("decimal_field_no_precision = models.DecimalField()", output)
|
||||||
|
|
||||||
def test_number_field_types(self):
|
def test_number_field_types(self):
|
||||||
"""Test introspection of various Django field types"""
|
"""Test introspection of various Django field types"""
|
||||||
assertFieldType = self.make_field_type_asserter()
|
assertFieldType = self.make_field_type_asserter()
|
||||||
|
|
@@ -228,13 +235,8 @@ class InspectDBTestCase(TestCase):
             assertFieldType(
                 "decimal_field", "models.DecimalField(max_digits=6, decimal_places=1)"
             )
-        else: # Guessed arguments on SQLite, see #5014
-            assertFieldType(
-                "decimal_field",
-                "models.DecimalField(max_digits=10, decimal_places=5) "
-                "# max_digits and decimal_places have been guessed, "
-                "as this database handles decimal fields as float",
-            )
+        else:
+            assertFieldType("decimal_field", "models.DecimalField()")

         assertFieldType("float_field", "models.FloatField()")
         assertFieldType(
@@ -299,6 +301,27 @@ class InspectDBTestCase(TestCase):
             out.getvalue(),
         )

+    @skipUnlessDBFeature("can_introspect_foreign_keys")
+    def test_foreign_key_db_on_delete(self):
+        out = StringIO()
+        call_command("inspectdb", "inspectdb_dbondeletemodel", stdout=out)
+        output = out.getvalue()
+        self.assertIn(
+            "fk_do_nothing = models.ForeignKey('InspectdbUniquetogether', "
+            "models.DO_NOTHING)",
+            output,
+        )
+        self.assertIn(
+            "fk_db_cascade = models.ForeignKey('InspectdbColumntypes', "
+            "models.DB_CASCADE)",
+            output,
+        )
+        self.assertIn(
+            "fk_set_null = models.ForeignKey('InspectdbDigitsincolumnname', "
+            "models.DB_SET_NULL, blank=True, null=True)",
+            output,
+        )
+
     def test_digits_column_name_introspection(self):
         """
         Introspection of column names consist/start with digits (#16536/#17676)
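Aside (not part of the patch): the assertions above can be reproduced by driving inspectdb programmatically, the same way the test does. A minimal sketch, assuming a project whose test database contains the inspectdb tables from this branch:

from io import StringIO

from django.core.management import call_command

out = StringIO()
call_command("inspectdb", "inspectdb_dbondeletemodel", stdout=out)
# On this branch the generated model is expected to carry the database-level
# on_delete choices asserted above, e.g. models.DB_CASCADE and models.DB_SET_NULL.
print(out.getvalue())
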
@@ -110,3 +110,18 @@ class DbCommentModel(models.Model):
     class Meta:
         db_table_comment = "Custom table comment"
         required_db_features = {"supports_comments"}
+
+
+class DbOnDeleteModel(models.Model):
+    fk_do_nothing = models.ForeignKey(Country, on_delete=models.DO_NOTHING)
+    fk_db_cascade = models.ForeignKey(City, on_delete=models.DB_CASCADE)
+    fk_set_null = models.ForeignKey(Reporter, on_delete=models.DB_SET_NULL, null=True)
+
+
+class DbOnDeleteSetDefaultModel(models.Model):
+    fk_db_set_default = models.ForeignKey(
+        Country, on_delete=models.DB_SET_DEFAULT, db_default=models.Value(1)
+    )
+
+    class Meta:
+        required_db_features = {"supports_on_delete_db_default"}
@@ -1,5 +1,5 @@
 from django.db import DatabaseError, connection
-from django.db.models import Index
+from django.db.models import DB_CASCADE, DB_SET_DEFAULT, DB_SET_NULL, DO_NOTHING, Index
 from django.test import TransactionTestCase, skipUnlessDBFeature

 from .models import (
@@ -10,6 +10,8 @@ from .models import (
     Comment,
     Country,
     DbCommentModel,
+    DbOnDeleteModel,
+    DbOnDeleteSetDefaultModel,
     District,
     Reporter,
     UniqueConstraintConditionModel,
@@ -219,10 +221,14 @@ class IntrospectionTests(TransactionTestCase):
                 cursor, Article._meta.db_table
             )

-        # That's {field_name: (field_name_other_table, other_table)}
+        if connection.vendor == "mysql" and connection.mysql_is_mariadb:
+            no_db_on_delete = None
+        else:
+            no_db_on_delete = DO_NOTHING
+        # {field_name: (field_name_other_table, other_table, db_on_delete)}
         expected_relations = {
-            "reporter_id": ("id", Reporter._meta.db_table),
-            "response_to_id": ("id", Article._meta.db_table),
+            "reporter_id": ("id", Reporter._meta.db_table, no_db_on_delete),
+            "response_to_id": ("id", Article._meta.db_table, no_db_on_delete),
         }
         self.assertEqual(relations, expected_relations)
@@ -238,6 +244,38 @@ class IntrospectionTests(TransactionTestCase):
             editor.add_field(Article, body)
         self.assertEqual(relations, expected_relations)

+    @skipUnlessDBFeature("can_introspect_foreign_keys")
+    def test_get_relations_db_on_delete(self):
+        with connection.cursor() as cursor:
+            relations = connection.introspection.get_relations(
+                cursor, DbOnDeleteModel._meta.db_table
+            )
+
+        if connection.vendor == "mysql" and connection.mysql_is_mariadb:
+            no_db_on_delete = None
+        else:
+            no_db_on_delete = DO_NOTHING
+        # {field_name: (field_name_other_table, other_table, db_on_delete)}
+        expected_relations = {
+            "fk_db_cascade_id": ("id", City._meta.db_table, DB_CASCADE),
+            "fk_do_nothing_id": ("id", Country._meta.db_table, no_db_on_delete),
+            "fk_set_null_id": ("id", Reporter._meta.db_table, DB_SET_NULL),
+        }
+        self.assertEqual(relations, expected_relations)
+
+    @skipUnlessDBFeature("can_introspect_foreign_keys", "supports_on_delete_db_default")
+    def test_get_relations_db_on_delete_default(self):
+        with connection.cursor() as cursor:
+            relations = connection.introspection.get_relations(
+                cursor, DbOnDeleteSetDefaultModel._meta.db_table
+            )
+
+        # {field_name: (field_name_other_table, other_table, db_on_delete)}
+        expected_relations = {
+            "fk_db_set_default_id": ("id", Country._meta.db_table, DB_SET_DEFAULT),
+        }
+        self.assertEqual(relations, expected_relations)
+
     def test_get_primary_key_column(self):
         with connection.cursor() as cursor:
             primary_key_column = connection.introspection.get_primary_key_column(
|
||||||
|
|
||||||
|
|
||||||
@isolate_apps("invalid_models_tests")
|
@isolate_apps("invalid_models_tests")
|
||||||
class DecimalFieldTests(SimpleTestCase):
|
class DecimalFieldTests(TestCase):
|
||||||
def test_required_attributes(self):
|
def test_both_attributes_omitted(self):
|
||||||
class Model(models.Model):
|
class Model(models.Model):
|
||||||
field = models.DecimalField()
|
field = models.DecimalField()
|
||||||
|
|
||||||
field = Model._meta.get_field("field")
|
field = Model._meta.get_field("field")
|
||||||
self.assertEqual(
|
if connection.features.supports_no_precision_decimalfield:
|
||||||
field.check(),
|
expected = []
|
||||||
[
|
else:
|
||||||
|
expected = [
|
||||||
Error(
|
Error(
|
||||||
"DecimalFields must define a 'decimal_places' attribute.",
|
"DecimalFields must define a 'decimal_places' attribute.",
|
||||||
obj=field,
|
obj=field,
|
||||||
|
|
@ -618,6 +619,52 @@ class DecimalFieldTests(SimpleTestCase):
|
||||||
obj=field,
|
obj=field,
|
||||||
id="fields.E132",
|
id="fields.E132",
|
||||||
),
|
),
|
||||||
|
]
|
||||||
|
self.assertEqual(field.check(), expected)
|
||||||
|
|
||||||
|
def test_both_attributes_omitted_required_db_features(self):
|
||||||
|
class Model(models.Model):
|
||||||
|
field = models.DecimalField()
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
required_db_features = {"supports_no_precision_decimalfield"}
|
||||||
|
|
||||||
|
field = Model._meta.get_field("field")
|
||||||
|
self.assertEqual(field.check(databases=self.databases), [])
|
||||||
|
|
||||||
|
@skipUnlessDBFeature("supports_no_precision_decimalfield")
|
||||||
|
def test_only_max_digits_defined(self):
|
||||||
|
class Model(models.Model):
|
||||||
|
field = models.DecimalField(max_digits=13)
|
||||||
|
|
||||||
|
field = Model._meta.get_field("field")
|
||||||
|
self.assertEqual(
|
||||||
|
field.check(),
|
||||||
|
[
|
||||||
|
Error(
|
||||||
|
"DecimalField’s max_digits and decimal_places must both "
|
||||||
|
"be defined or both omitted.",
|
||||||
|
obj=field,
|
||||||
|
id="fields.E135",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
@skipUnlessDBFeature("supports_no_precision_decimalfield")
|
||||||
|
def test_only_decimal_places_defined(self):
|
||||||
|
class Model(models.Model):
|
||||||
|
field = models.DecimalField(decimal_places=5)
|
||||||
|
|
||||||
|
field = Model._meta.get_field("field")
|
||||||
|
self.assertEqual(
|
||||||
|
field.check(),
|
||||||
|
[
|
||||||
|
Error(
|
||||||
|
"DecimalField’s max_digits and decimal_places must both "
|
||||||
|
"be defined or both omitted.",
|
||||||
|
obj=field,
|
||||||
|
id="fields.E135",
|
||||||
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
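Aside (a sketch, not part of the diff): the new checks tests encode one rule, namely that max_digits and decimal_places must both be given or both omitted, and omitting both is only valid on backends with supports_no_precision_decimalfield, which this branch introduces. "myapp" below is an illustrative app label for a models.py:

from django.db import models


class Invoice(models.Model):
    fixed = models.DecimalField(max_digits=10, decimal_places=2)  # both given: passes checks
    free = models.DecimalField()  # both omitted: allowed only where the feature flag is set
    broken = models.DecimalField(max_digits=10)  # only one given: fields.E135 per the tests above

    class Meta:
        app_label = "myapp"
        required_db_features = {"supports_no_precision_decimalfield"}
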
@@ -113,7 +113,7 @@ class MigrateTests(MigrationTestBase):
         out = io.StringIO()
         call_command("migrate", skip_checks=False, no_color=True, stdout=out)
         self.assertIn("Apply all migrations: migrated_app", out.getvalue())
-        mocked_check.assert_called_once()
+        mocked_check.assert_called_once_with(databases=["default"])

     def test_migrate_with_custom_system_checks(self):
         original_checks = registry.registered_checks.copy()
|
||||||
command = CustomMigrateCommandWithSecurityChecks()
|
command = CustomMigrateCommandWithSecurityChecks()
|
||||||
call_command(command, skip_checks=False, stdout=io.StringIO())
|
call_command(command, skip_checks=False, stdout=io.StringIO())
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
INSTALLED_APPS=[
|
||||||
|
"django.contrib.auth",
|
||||||
|
"django.contrib.contenttypes",
|
||||||
|
"migrations.migrations_test_apps.migrated_app",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
def test_migrate_runs_database_system_checks(self):
|
||||||
|
original_checks = registry.registered_checks.copy()
|
||||||
|
self.addCleanup(setattr, registry, "registered_checks", original_checks)
|
||||||
|
|
||||||
|
out = io.StringIO()
|
||||||
|
mock_check = mock.Mock(return_value=[])
|
||||||
|
register(mock_check, Tags.database)
|
||||||
|
|
||||||
|
call_command("migrate", skip_checks=False, no_color=True, stdout=out)
|
||||||
|
self.assertIn("Apply all migrations: migrated_app", out.getvalue())
|
||||||
|
mock_check.assert_called_once_with(app_configs=None, databases=["default"])
|
||||||
|
|
||||||
@override_settings(
|
@override_settings(
|
||||||
INSTALLED_APPS=[
|
INSTALLED_APPS=[
|
||||||
"migrations",
|
"migrations",
|
||||||
|
|
|
||||||
|
|
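Aside (illustrative): the new test registers a mock check under Tags.database and asserts that migrate calls it with databases=["default"]. A real database-tagged check is registered the same way; this sketch is not from the patch:

from django.core.checks import Tags, register


@register(Tags.database)
def example_database_check(app_configs, databases=None, **kwargs):
    # A real check would inspect the connections named in `databases`
    # and return a list of checks.Error / checks.Warning instances.
    return []
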
@@ -1,7 +1,12 @@
 import compileall
 import os
+import subprocess
+import sys
+import tempfile
 from importlib import import_module
+from pathlib import Path

+from django.conf import settings
 from django.db import connection, connections
 from django.db.migrations.exceptions import (
     AmbiguityError,
@@ -649,6 +654,70 @@ class LoaderTests(TestCase):
         test_module.__spec__.origin = module_origin
         test_module.__spec__.has_location = module_has_location

+    def test_loading_order_does_not_create_circular_dependency(self):
+        """
+        Before, for these migrations:
+        app1
+        [ ] 0001_squashed_initial <- replaces app1.0001
+        [ ] 0002_squashed_initial <- replaces app1.0001
+            depends on app1.0001_squashed_initial & app2.0001_squashed_initial
+        app2
+        [ ] 0001_squashed_initial <- replaces app2.0001
+
+        When loading app1's migrations, if 0002_squashed_initial was first:
+        {'0002_squashed_initial', '0001_initial', '0001_squashed_initial'}
+        Then CircularDependencyError was raised, but it's resolvable as:
+        {'0001_initial', '0001_squashed_initial', '0002_squashed_initial'}
+        """
+        # Create a test settings file to provide to the subprocess.
+        MIGRATION_MODULES = {
+            "app1": "migrations.test_migrations_squashed_replaced_order.app1",
+            "app2": "migrations.test_migrations_squashed_replaced_order.app2",
+        }
+        INSTALLED_APPS = [
+            "migrations.test_migrations_squashed_replaced_order.app1",
+            "migrations.test_migrations_squashed_replaced_order.app2",
+        ]
+        tests_dir = Path(__file__).parent.parent
+        with tempfile.NamedTemporaryFile(
+            mode="w", encoding="utf-8", suffix=".py", dir=tests_dir, delete=False
+        ) as test_settings:
+            for attr, value in settings._wrapped.__dict__.items():
+                if attr.isupper():
+                    test_settings.write(f"{attr} = {value!r}\n")
+            # Provide overrides here, instead of via decorators.
+            test_settings.write(f"DATABASES = {settings.DATABASES}\n")
+            test_settings.write(f"MIGRATION_MODULES = {MIGRATION_MODULES}\n")
+            # Isolate away other test apps.
+            test_settings.write(
+                "INSTALLED_APPS=[a for a in INSTALLED_APPS if a.startswith('django')]\n"
+            )
+            test_settings.write(f"INSTALLED_APPS += {INSTALLED_APPS}\n")
+            test_settings_name = test_settings.name
+        self.addCleanup(os.remove, test_settings_name)
+
+        test_environ = os.environ.copy()
+        test_environ["PYTHONPATH"] = str(tests_dir)
+        # Ensure deterministic failures.
+        test_environ["PYTHONHASHSEED"] = "1"
+
+        args = [
+            sys.executable,
+            "-m",
+            "django",
+            "showmigrations",
+            "app1",
+            "--skip-checks",
+            "--settings",
+            Path(test_settings_name).stem,
+        ]
+        try:
+            subprocess.run(
+                args, capture_output=True, env=test_environ, check=True, text=True
+            )
+        except subprocess.CalledProcessError as err:
+            self.fail(err.stderr)
+

 class PycLoaderTests(MigrationTestBase):
     def test_valid(self):
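Aside (illustrative only, not the loader's code): the docstring's point is that the replacement graph is resolvable once the plain initial migration is considered before the squashed ones, for example the order produced by simply sorting the names:

names = {"0002_squashed_initial", "0001_initial", "0001_squashed_initial"}
print(sorted(names))
# ['0001_initial', '0001_squashed_initial', '0002_squashed_initial']
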
@@ -0,0 +1,9 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    dependencies = []
+
+    operations = []
@@ -0,0 +1,13 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    replaces = [
+        ("app1", "0001_initial"),
+    ]
+
+    dependencies = []
+
+    operations = []
@@ -0,0 +1,16 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    replaces = [
+        ("app1", "0001_initial"),
+    ]
+
+    dependencies = [
+        ("app1", "0001_squashed_initial"),
+        ("app2", "0001_squashed_initial"),
+    ]
+
+    operations = []
@@ -0,0 +1,11 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    dependencies = [
+        ("app1", "0001_initial"),
+    ]
+
+    operations = []
@@ -0,0 +1,13 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    replaces = [
+        ("app2", "0001_initial"),
+    ]
+
+    dependencies = []
+
+    operations = []
@@ -1237,6 +1237,12 @@ class WriterTests(SimpleTestCase):
         self.assertEqual(result.args, instance.args)
         self.assertEqual(result.kwargs, instance.kwargs)

+    def test_serialize_generic_alias(self):
+        self.assertSerializedEqual(dict[str, float])
+
+    def test_serialize_generic_alias_complex_args(self):
+        self.assertSerializedEqual(dict[str, models.Manager])
+
     def test_register_serializer(self):
         class ComplexSerializer(BaseSerializer):
             def serialize(self):
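Aside (a sketch under this branch's assumptions): the new WriterTests assert that generic aliases such as dict[str, float] round-trip through the migration serializer. Exercised directly, that looks roughly like:

from django.db.migrations.serializer import serializer_factory

code, imports = serializer_factory(dict[str, float]).serialize()
print(code, imports)  # expected to reproduce the alias, per the tests above
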
@@ -1265,6 +1265,13 @@ class JSONNullTests(TestCase):
     def test_repr(self):
         self.assertEqual(repr(JSONNull()), "JSONNull()")

+    def test_deconstruct(self):
+        jsonnull = JSONNull()
+        path, args, kwargs = jsonnull.deconstruct()
+        self.assertEqual(path, "django.db.models.JSONNull")
+        self.assertEqual(args, ())
+        self.assertEqual(kwargs, {})
+
     def test_save_load(self):
         obj = JSONModel(value=JSONNull())
         obj.save()
@@ -543,3 +543,22 @@ class ConstraintsModel(models.Model):
                 violation_error_message="Price must be greater than zero.",
             ),
         ]
+
+
+class AttnameConstraintsModel(models.Model):
+    left = models.ForeignKey(
+        "self", related_name="+", null=True, on_delete=models.SET_NULL
+    )
+    right = models.ForeignKey(
+        "self", related_name="+", null=True, on_delete=models.SET_NULL
+    )
+
+    class Meta:
+        required_db_features = {"supports_table_check_constraints"}
+        constraints = [
+            models.CheckConstraint(
+                name="%(app_label)s_%(class)s_left_not_right",
+                # right_id here is the ForeignKey's attname, not name.
+                condition=~models.Q(left=models.F("right_id")),
+            ),
+        ]
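Aside (illustrative): the in-model comment leans on the name/attname distinction for ForeignKeys; given the new test model above, the field attribute is "right" while the stored column attribute is "right_id":

field = AttnameConstraintsModel._meta.get_field("right")
print(field.name)     # "right"
print(field.attname)  # "right_id"
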
@@ -30,6 +30,7 @@ from django.utils.version import PY314, PYPY
 from .models import (
     Article,
     ArticleStatus,
+    AttnameConstraintsModel,
     Author,
     Author1,
     Award,
@@ -976,15 +977,15 @@ class TestFieldOverridesByFormMeta(SimpleTestCase):
         )
         self.assertHTMLEqual(
             form["name"].legend_tag(),
-            '<legend for="id_name">Title:</legend>',
+            "<legend>Title:</legend>",
         )
         self.assertHTMLEqual(
             form["url"].legend_tag(),
-            '<legend for="id_url">The URL:</legend>',
+            "<legend>The URL:</legend>",
         )
         self.assertHTMLEqual(
             form["slug"].legend_tag(),
-            '<legend for="id_slug">Slug:</legend>',
+            "<legend>Slug:</legend>",
         )

     def test_help_text_overrides(self):
@@ -3766,3 +3767,17 @@ class ConstraintValidationTests(TestCase):
         self.assertEqual(
             full_form.errors, {"__all__": ["Price must be greater than zero."]}
         )
+
+    def test_check_constraint_refs_excluded_field_attname(self):
+        left = AttnameConstraintsModel.objects.create()
+        instance = AttnameConstraintsModel.objects.create(left=left)
+        data = {
+            "left": str(left.id),
+            "right": "",
+        }
+        AttnameConstraintsModelForm = modelform_factory(
+            AttnameConstraintsModel, fields="__all__"
+        )
+        full_form = AttnameConstraintsModelForm(data, instance=instance)
+        self.assertFalse(full_form.is_valid())
+        self.assertEqual(full_form.errors, {"right": ["This field is required."]})
Some files were not shown because too many files have changed in this diff.