hash | content
---|---
20489ec9839b3d8dc101101e79b57fe957331a082dcd28c10578a482ecf2c7e8 | import asyncio
import sys
import threading
from pathlib import Path
from asgiref.testing import ApplicationCommunicator
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.asgi import get_asgi_application
from django.core.signals import request_finished, request_started
from django.db import close_old_connections
from django.test import (
AsyncRequestFactory,
SimpleTestCase,
ignore_warnings,
modify_settings,
override_settings,
)
from django.utils.http import http_date
from .urls import sync_waiter, test_filename
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
@override_settings(ROOT_URLCONF="asgi.urls")
class ASGITest(SimpleTestCase):
async_request_factory = AsyncRequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
async def test_get_asgi_application(self):
"""
get_asgi_application() returns a functioning ASGI callable.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"12"),
(b"Content-Type", b"text/html; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Allow response.close() to finish.
await communicator.wait()
# Python's file API is not async compatible. A third-party library such
# as https://github.com/Tinche/aiofiles allows passing the file to
# FileResponse as an async iterator. With a sync iterator
# StreamingHttpResponse triggers a warning when iterating the file.
# assertWarnsMessage is not async compatible, so ignore_warnings for the
# test.
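# A hedged sketch (not part of this suite) of the aiofiles approach the
# comment above refers to; the aiofiles API usage is an assumption here:
#
#   import aiofiles
#   from django.http import FileResponse
#
#   async def file_view(request):
#       # An async file object lets the response iterate the body without
#       # the sync-iterator warning that is ignored below.
#       return FileResponse(await aiofiles.open(test_filename, "rb"))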
@ignore_warnings(module="django.http.response")
async def test_file_response(self):
"""
Makes sure that FileResponse works over ASGI.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/file/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
with open(test_filename, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
headers = response_start["headers"]
self.assertEqual(len(headers), 3)
expected_headers = {
b"Content-Length": str(len(test_file_contents)).encode("ascii"),
b"Content-Type": b"text/x-python",
b"Content-Disposition": b'inline; filename="urls.py"',
}
for key, value in headers:
try:
self.assertEqual(value, expected_headers[key])
except AssertionError:
# Windows registry may not be configured with correct
# mimetypes.
if sys.platform == "win32" and key == b"Content-Type":
self.assertEqual(value, b"text/plain")
else:
raise
# Warning ignored here.
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
@modify_settings(INSTALLED_APPS={"append": "django.contrib.staticfiles"})
@override_settings(
STATIC_URL="static/",
STATIC_ROOT=TEST_STATIC_ROOT,
STATICFILES_DIRS=[TEST_STATIC_ROOT],
STATICFILES_FINDERS=[
"django.contrib.staticfiles.finders.FileSystemFinder",
],
)
@ignore_warnings(module="django.http.response")
async def test_static_file_response(self):
application = ASGIStaticFilesHandler(get_asgi_application())
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/static/file.txt")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
file_path = TEST_STATIC_ROOT / "file.txt"
with open(file_path, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
stat = file_path.stat()
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", str(len(test_file_contents)).encode("ascii")),
(b"Content-Type", b"text/plain"),
(b"Content-Disposition", b'inline; filename="file.txt"'),
(b"Last-Modified", http_date(stat.st_mtime).encode("ascii")),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
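# For reference, this is the same wiring a project would use to serve
# static files during development with an ASGI server (a sketch, not
# project code):
#
#   # myproject/asgi.py
#   from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
#   from django.core.asgi import get_asgi_application
#
#   application = ASGIStaticFilesHandler(get_asgi_application())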
async def test_headers(self):
application = get_asgi_application()
communicator = ApplicationCommunicator(
application,
self.async_request_factory._base_scope(
path="/meta/",
headers=[
[b"content-type", b"text/plain; charset=utf-8"],
[b"content-length", b"77"],
[b"referer", b"Scotland"],
[b"referer", b"Wales"],
],
),
)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"19"),
(b"Content-Type", b"text/plain; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"From Scotland,Wales")
# Allow response.close() to finish
await communicator.wait()
async def test_post_body(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(
method="POST",
path="/post/",
query_string="echo=1",
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request", "body": b"Echo!"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Echo!")
async def test_untouched_request_body_gets_closed(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(method="POST", path="/post/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 204)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"")
# Allow response.close() to finish
await communicator.wait()
async def test_get_query_string(self):
application = get_asgi_application()
for query_string in (b"name=Andrew", "name=Andrew"):
with self.subTest(query_string=query_string):
scope = self.async_request_factory._base_scope(
path="/",
query_string=query_string,
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello Andrew!")
# Allow response.close() to finish
await communicator.wait()
async def test_disconnect(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.disconnect"})
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
async def test_wrong_connection_type(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", type="other")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
msg = "Django can only handle ASGI/HTTP connections, not other."
with self.assertRaisesMessage(ValueError, msg):
await communicator.receive_output()
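# The scope "type" key is what get_asgi_application() inspects; an HTTP
# scope minimally looks like (illustrative values only):
#
#   scope = {"type": "http", "method": "GET", "path": "/", "headers": []}
#
# Any other type (e.g. "websocket", or "other" as above) is rejected with
# the ValueError asserted on.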
async def test_non_unicode_query_string(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", query_string=b"\xff")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 400)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"")
async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
class SignalHandler:
"""Track threads handler is dispatched on."""
threads = []
def __call__(self, **kwargs):
self.threads.append(threading.current_thread())
signal_handler = SignalHandler()
request_started.connect(signal_handler)
request_finished.connect(signal_handler)
# Perform a basic request.
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Give response.close() time to finish.
await communicator.wait()
# AsyncToSync should have executed the signals in the same thread.
request_started_thread, request_finished_thread = signal_handler.threads
self.assertEqual(request_started_thread, request_finished_thread)
request_started.disconnect(signal_handler)
request_finished.disconnect(signal_handler)
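# The single-thread behavior asserted above follows from asgiref's
# sync_to_async default of thread_sensitive=True, which runs all
# thread-sensitive sync code (such as signal handlers) on one shared
# thread. A minimal sketch of that wrapping:
#
#   from asgiref.sync import sync_to_async
#
#   # thread_sensitive=True is the default.
#   wrapped = sync_to_async(signal_handler, thread_sensitive=True)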
async def test_concurrent_async_uses_multiple_thread_pools(self):
sync_waiter.active_threads.clear()
# Send 2 requests concurrently
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/wait/")
communicators = []
for _ in range(2):
communicators.append(ApplicationCommunicator(application, scope))
await communicators[-1].send_input({"type": "http.request"})
# Each request must complete with a status code of 200.
# If requests aren't scheduled concurrently, the barrier in the
# sync_waiter view will time out, resulting in a 500 status code.
for communicator in communicators:
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Give response.close() time to finish.
await communicator.wait()
# The requests should have been scheduled on different threads. Note
# that active_threads is a set (a thread can only appear once), so its
# length is a sufficient check.
self.assertEqual(len(sync_waiter.active_threads), 2)
sync_waiter.active_threads.clear()
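# sync_waiter is imported from .urls and isn't shown here; a hypothetical
# sketch of such a view, using a two-party barrier so the test only
# passes when both requests are in flight at once:
#
#   import threading
#   from django.http import HttpResponse
#
#   def sync_waiter(request):
#       sync_waiter.active_threads.add(threading.current_thread())
#       sync_waiter.barrier.wait(timeout=0.5)
#       return HttpResponse("Hello World!")
#
#   sync_waiter.active_threads = set()
#   sync_waiter.barrier = threading.Barrier(2)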
|
1bd2934d18ec338067357e57e2083a528e8869ec238d29d3844c7db709326bf3 | import json
import os
import shutil
import sys
import tempfile
import unittest
from io import StringIO
from pathlib import Path
from unittest import mock
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.management.commands.collectstatic import (
Command as CollectstaticCommand,
)
from django.core.management import call_command
from django.test import SimpleTestCase, override_settings
from .cases import CollectionTestCase
from .settings import TEST_ROOT
def hashed_file_path(test, path):
fullpath = test.render_template(test.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, "")
class TestHashedFiles:
hashed_file_path = hashed_file_path
def tearDown(self):
# Clear hashed files to avoid side effects among tests.
storage.staticfiles_storage.hashed_files.clear()
def assertPostCondition(self):
"""
Assert post conditions for a test are met. Must be manually called at
the end of each test.
"""
pass
def test_template_tag_return(self):
self.assertStaticRaises(
ValueError, "does/not/exist.png", "/static/does/not/exist.png"
)
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt")
self.assertStaticRenders(
"test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True
)
self.assertStaticRenders(
"cached/styles.css", "/static/cached/styles.5e0040571e1a.css"
)
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
self.assertPostCondition()
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_ignored_completely(self):
relpath = self.hashed_file_path("cached/css/ignored.css")
self.assertEqual(relpath, "cached/css/ignored.55e7c226dda1.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"#foobar", content)
self.assertIn(b"http:foobar", content)
self.assertIn(b"https:foobar", content)
self.assertIn(b"data:foobar", content)
self.assertIn(b"chrome:foobar", content)
self.assertIn(b"//foobar", content)
self.assertIn(b"url()", content)
self.assertPostCondition()
def test_path_with_querystring(self):
relpath = self.hashed_file_path("cached/styles.css?spam=eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs")
with storage.staticfiles_storage.open(
"cached/styles.5e0040571e1a.css"
) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_fragment(self):
relpath = self.hashed_file_path("cached/styles.css#eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs")
with storage.staticfiles_storage.open(
"cached/styles.5e0040571e1a.css"
) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_querystring_and_fragment(self):
relpath = self.hashed_file_path("cached/css/fragments.css")
self.assertEqual(relpath, "cached/css/fragments.a60c0e74834f.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"fonts/font.b9b105392eb8.eot?#iefix", content)
self.assertIn(b"fonts/font.b8d603e42714.svg#webfontIyfZbseF", content)
self.assertIn(
b"fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg", content
)
self.assertIn(
b"data:font/woff;charset=utf-8;"
b"base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA",
content,
)
self.assertIn(b"#default#VML", content)
self.assertPostCondition()
def test_template_tag_absolute(self):
relpath = self.hashed_file_path("cached/absolute.css")
self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/cached/styles.css", content)
self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content)
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertIn(b"/static/cached/img/relative.acae32e4532b.png", content)
self.assertPostCondition()
def test_template_tag_absolute_root(self):
"""
Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249).
"""
relpath = self.hashed_file_path("absolute_root.css")
self.assertEqual(relpath, "absolute_root.f821df1b64f7.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertPostCondition()
def test_template_tag_relative(self):
relpath = self.hashed_file_path("cached/relative.css")
self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"../cached/styles.css", content)
self.assertNotIn(b'@import "styles.css"', content)
self.assertNotIn(b"url(img/relative.png)", content)
self.assertIn(b'url("img/relative.acae32e4532b.png")', content)
self.assertIn(b"../cached/styles.5e0040571e1a.css", content)
self.assertPostCondition()
def test_import_replacement(self):
"See #18050"
relpath = self.hashed_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read())
self.assertPostCondition()
def test_template_tag_deep_relative(self):
relpath = self.hashed_file_path("cached/css/window.css")
self.assertEqual(relpath, "cached/css/window.5d5c10836967.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"url(img/window.png)", content)
self.assertIn(b'url("img/window.acae32e4532b.png")', content)
self.assertPostCondition()
def test_template_tag_url(self):
relpath = self.hashed_file_path("cached/url.css")
self.assertEqual(relpath, "cached/url.902310b73412.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"https://", relfile.read())
self.assertPostCondition()
def test_module_import(self):
relpath = self.hashed_file_path("cached/module.js")
self.assertEqual(relpath, "cached/module.55fd6938fbc5.js")
tests = [
# Relative imports.
b'import testConst from "./module_test.477bbebe77f0.js";',
b'import relativeModule from "../nested/js/nested.866475c46bb4.js";',
b'import { firstConst, secondConst } from "./module_test.477bbebe77f0.js";',
# Absolute import.
b'import rootConst from "/static/absolute_root.5586327fe78c.js";',
# Dynamic import.
b'const dynamicModule = import("./module_test.477bbebe77f0.js");',
# Creating a module object.
b'import * as NewModule from "./module_test.477bbebe77f0.js";',
# Aliases.
b'import { testConst as alias } from "./module_test.477bbebe77f0.js";',
b"import {\n"
b" firstVar1 as firstVarAlias,\n"
b" $second_var_2 as secondVarAlias\n"
b'} from "./module_test.477bbebe77f0.js";',
]
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
for module_import in tests:
with self.subTest(module_import=module_import):
self.assertIn(module_import, content)
self.assertPostCondition()
def test_aggregating_modules(self):
relpath = self.hashed_file_path("cached/module.js")
self.assertEqual(relpath, "cached/module.55fd6938fbc5.js")
tests = [
b'export * from "./module_test.477bbebe77f0.js";',
b'export { testConst } from "./module_test.477bbebe77f0.js";',
b"export {\n"
b" firstVar as firstVarAlias,\n"
b" secondVar as secondVarAlias\n"
b'} from "./module_test.477bbebe77f0.js";',
]
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
for module_import in tests:
with self.subTest(module_import=module_import):
self.assertIn(module_import, content)
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "loop")],
STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"],
)
def test_import_loop(self):
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaisesMessage(RuntimeError, "Max post-process passes exceeded"):
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue())
self.assertPostCondition()
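# The "loop" fixture isn't shown here; presumably it holds two CSS files
# that import each other, e.g. (illustrative):
#
#   /* foo.css */
#   @import url("bar.css");
#
#   /* bar.css */
#   @import url("foo.css");
#
# Each post-processing pass rewrites one reference and invalidates the
# other, so the maximum number of passes is eventually exceeded.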
def test_post_processing(self):
"""
post_processing behaves correctly.
Files that are alterable should always be post-processed; files that
aren't should be skipped.
collectstatic has already been called once in setUp() for this testcase,
therefore we check by verifying behavior on a second run.
"""
collectstatic_args = {
"interactive": False,
"verbosity": 0,
"link": False,
"clear": False,
"dry_run": False,
"post_process": True,
"use_default_ignore_patterns": True,
"ignore_patterns": ["*.ignoreme"],
}
collectstatic_cmd = CollectstaticCommand()
collectstatic_cmd.set_options(**collectstatic_args)
stats = collectstatic_cmd.collect()
self.assertIn(
os.path.join("cached", "css", "window.css"), stats["post_processed"]
)
self.assertIn(
os.path.join("cached", "css", "img", "window.png"), stats["unmodified"]
)
self.assertIn(os.path.join("test", "nonascii.css"), stats["post_processed"])
# No file should be yielded twice.
self.assertCountEqual(stats["post_processed"], set(stats["post_processed"]))
self.assertPostCondition()
def test_css_import_case_insensitive(self):
relpath = self.hashed_file_path("cached/styles_insensitive.css")
self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_css_source_map(self):
relpath = self.hashed_file_path("cached/source_map.css")
self.assertEqual(relpath, "cached/source_map.b2fceaf426aa.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/*# sourceMappingURL=source_map.css.map*/", content)
self.assertIn(
b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */",
content,
)
self.assertPostCondition()
def test_css_source_map_tabs(self):
relpath = self.hashed_file_path("cached/source_map_tabs.css")
self.assertEqual(relpath, "cached/source_map_tabs.b2fceaf426aa.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/*#\tsourceMappingURL=source_map.css.map\t*/", content)
self.assertIn(
b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */",
content,
)
self.assertPostCondition()
def test_css_source_map_sensitive(self):
relpath = self.hashed_file_path("cached/source_map_sensitive.css")
self.assertEqual(relpath, "cached/source_map_sensitive.456683f2106f.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"/*# sOuRcEMaPpInGURL=source_map.css.map */", content)
self.assertNotIn(
b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */",
content,
)
self.assertPostCondition()
def test_js_source_map(self):
relpath = self.hashed_file_path("cached/source_map.js")
self.assertEqual(relpath, "cached/source_map.cd45b8534a87.js")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"//# sourceMappingURL=source_map.js.map", content)
self.assertIn(
b"//# sourceMappingURL=source_map.js.99914b932bd3.map",
content,
)
self.assertPostCondition()
def test_js_source_map_trailing_whitespace(self):
relpath = self.hashed_file_path("cached/source_map_trailing_whitespace.js")
self.assertEqual(
relpath, "cached/source_map_trailing_whitespace.cd45b8534a87.js"
)
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"//# sourceMappingURL=source_map.js.map\t ", content)
self.assertIn(
b"//# sourceMappingURL=source_map.js.99914b932bd3.map",
content,
)
self.assertPostCondition()
def test_js_source_map_sensitive(self):
relpath = self.hashed_file_path("cached/source_map_sensitive.js")
self.assertEqual(relpath, "cached/source_map_sensitive.5da96fdd3cb3.js")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"//# sOuRcEMaPpInGURL=source_map.js.map", content)
self.assertNotIn(
b"//# sourceMappingURL=source_map.js.99914b932bd3.map",
content,
)
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "faulty")],
STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"],
)
def test_post_processing_failure(self):
"""
post_processing indicates the origin of the error when it fails.
"""
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaises(Exception):
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue())
self.assertPostCondition()
@override_settings(
STATICFILES_STORAGE="staticfiles_tests.storage.ExtraPatternsStorage",
)
class TestExtraPatternsStorage(CollectionTestCase):
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def cached_file_path(self, path):
fullpath = self.render_template(self.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, "")
def test_multi_extension_patterns(self):
"""
With storage classes having several file extension patterns, only the
files matching a specific file pattern should be affected by the
substitution (#19670).
"""
# CSS files shouldn't be touched by JS patterns.
relpath = self.cached_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read())
# Confirm JS patterns have been applied to JS files.
relpath = self.cached_file_path("cached/test.js")
self.assertEqual(relpath, "cached/test.388d7a790d46.js")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read())
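# ExtraPatternsStorage is defined in staticfiles_tests/storage.py and
# isn't shown here. A hedged sketch of the idea: a manifest storage whose
# patterns also rewrite JS_URL("...") references in *.js files:
#
#   from django.contrib.staticfiles.storage import ManifestStaticFilesStorage
#
#   class ExtraPatternsStorage(ManifestStaticFilesStorage):
#       patterns = tuple(ManifestStaticFilesStorage.patterns) + (
#           (
#               "*.js",
#               (
#                   (
#                       r"""(?P<matched>JS_URL\(["'](?P<url>.*?)["']\))""",
#                       'JS_URL("%(url)s")',
#                   ),
#               ),
#           ),
#       )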
@override_settings(
STATICFILES_STORAGE="django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
)
class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
"""
Tests for the cache-busting storage.
"""
def setUp(self):
super().setUp()
temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, "test"))
self._clear_filename = os.path.join(temp_dir, "test", "cleared.txt")
with open(self._clear_filename, "w") as f:
f.write("to be deleted in one test")
self.patched_settings = self.settings(
STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir],
)
self.patched_settings.enable()
self.addCleanup(shutil.rmtree, temp_dir)
self._manifest_strict = storage.staticfiles_storage.manifest_strict
def tearDown(self):
self.patched_settings.disable()
if os.path.exists(self._clear_filename):
os.unlink(self._clear_filename)
storage.staticfiles_storage.manifest_strict = self._manifest_strict
super().tearDown()
def assertPostCondition(self):
hashed_files = storage.staticfiles_storage.hashed_files
# The in-memory version of the manifest matches the one on disk
# since a properly created manifest should cover all filenames.
if hashed_files:
manifest, _ = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_manifest_exists(self):
filename = storage.staticfiles_storage.manifest_name
path = storage.staticfiles_storage.path(filename)
self.assertTrue(os.path.exists(path))
def test_manifest_does_not_exist(self):
storage.staticfiles_storage.manifest_name = "does.not.exist.json"
self.assertIsNone(storage.staticfiles_storage.read_manifest())
def test_manifest_does_not_ignore_permission_error(self):
with mock.patch("builtins.open", side_effect=PermissionError):
with self.assertRaises(PermissionError):
storage.staticfiles_storage.read_manifest()
def test_loaded_cache(self):
self.assertNotEqual(storage.staticfiles_storage.hashed_files, {})
manifest_content = storage.staticfiles_storage.read_manifest()
self.assertIn(
'"version": "%s"' % storage.staticfiles_storage.manifest_version,
manifest_content,
)
def test_parse_cache(self):
hashed_files = storage.staticfiles_storage.hashed_files
manifest, _ = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_clear_empties_manifest(self):
cleared_file_name = storage.staticfiles_storage.clean_name(
os.path.join("test", "cleared.txt")
)
# Collect the additional file.
self.run_collectstatic()
hashed_files = storage.staticfiles_storage.hashed_files
self.assertIn(cleared_file_name, hashed_files)
manifest_content, _ = storage.staticfiles_storage.load_manifest()
self.assertIn(cleared_file_name, manifest_content)
original_path = storage.staticfiles_storage.path(cleared_file_name)
self.assertTrue(os.path.exists(original_path))
# Delete the original file from the app, collect with clear.
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
self.assertFileNotFound(original_path)
hashed_files = storage.staticfiles_storage.hashed_files
self.assertNotIn(cleared_file_name, hashed_files)
manifest_content, _ = storage.staticfiles_storage.load_manifest()
self.assertNotIn(cleared_file_name, manifest_content)
def test_missing_entry(self):
missing_file_name = "cached/missing.css"
configured_storage = storage.staticfiles_storage
self.assertNotIn(missing_file_name, configured_storage.hashed_files)
# File name not found in manifest
with self.assertRaisesMessage(
ValueError,
"Missing staticfiles manifest entry for '%s'" % missing_file_name,
):
self.hashed_file_path(missing_file_name)
configured_storage.manifest_strict = False
# File doesn't exist on disk
err_msg = "The file '%s' could not be found with %r." % (
missing_file_name,
configured_storage._wrapped,
)
with self.assertRaisesMessage(ValueError, err_msg):
self.hashed_file_path(missing_file_name)
content = StringIO()
content.write("Found")
configured_storage.save(missing_file_name, content)
# File exists on disk
self.hashed_file_path(missing_file_name)
def test_intermediate_files(self):
cached_files = os.listdir(os.path.join(settings.STATIC_ROOT, "cached"))
# No intermediate hashed files should be left behind; only relative.css
# and its final hashed copy should exist.
self.assertEqual(
len(
[
cached_file
for cached_file in cached_files
if cached_file.startswith("relative.")
]
),
2,
)
def test_manifest_hash(self):
# Collect the additional file.
self.run_collectstatic()
_, manifest_hash_orig = storage.staticfiles_storage.load_manifest()
self.assertNotEqual(manifest_hash_orig, "")
self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig)
# Saving doesn't change the hash.
storage.staticfiles_storage.save_manifest()
self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig)
# Delete the original file from the app, collect with clear.
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
# Hash is changed.
_, manifest_hash = storage.staticfiles_storage.load_manifest()
self.assertNotEqual(manifest_hash, manifest_hash_orig)
def test_manifest_hash_v1(self):
storage.staticfiles_storage.manifest_name = "staticfiles_v1.json"
manifest_content, manifest_hash = storage.staticfiles_storage.load_manifest()
self.assertEqual(manifest_hash, "")
self.assertEqual(manifest_content, {"dummy.txt": "dummy.txt"})
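# For orientation, a modern staticfiles.json manifest looks roughly like
# (values abbreviated):
#
#   {
#       "version": "1.1",
#       "paths": {"cached/styles.css": "cached/styles.5e0040571e1a.css"},
#       "hash": "..."
#   }
#
# Version 1.0 manifests predate the top-level "hash" entry, which is why
# load_manifest() returns an empty hash for the v1 fixture above.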
@override_settings(STATICFILES_STORAGE="staticfiles_tests.storage.NoneHashStorage")
class TestCollectionNoneHashStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def test_hashed_name(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.css")
@override_settings(
STATICFILES_STORAGE="staticfiles_tests.storage.NoPostProcessReplacedPathStorage"
)
class TestCollectionNoPostProcessReplacedPaths(CollectionTestCase):
run_collectstatic_in_setUp = False
def test_collectstatistic_no_post_process_replaced_paths(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout)
self.assertIn("post-processed", stdout.getvalue())
@override_settings(STATICFILES_STORAGE="staticfiles_tests.storage.SimpleStorage")
class TestCollectionSimpleStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def test_template_tag_return(self):
self.assertStaticRaises(
ValueError, "does/not/exist.png", "/static/does/not/exist.png"
)
self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt")
self.assertStaticRenders(
"cached/styles.css", "/static/cached/styles.deploy12345.css"
)
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.deploy12345.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.deploy12345.css", content)
class CustomManifestStorage(storage.ManifestStaticFilesStorage):
def __init__(self, *args, manifest_storage=None, **kwargs):
manifest_storage = storage.StaticFilesStorage(
location=kwargs.pop("manifest_location"),
)
super().__init__(*args, manifest_storage=manifest_storage, **kwargs)
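# A minimal usage sketch: manifest_storage (accepted by
# ManifestFilesMixin) controls only where staticfiles.json is read and
# written, not where the collected files themselves are stored:
#
#   staticfiles_storage = CustomManifestStorage(
#       manifest_location="/tmp/manifests",  # hypothetical path
#   )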
class TestCustomManifestStorage(SimpleTestCase):
def setUp(self):
self.manifest_path = Path(tempfile.mkdtemp())
self.addCleanup(shutil.rmtree, self.manifest_path)
self.staticfiles_storage = CustomManifestStorage(
manifest_location=self.manifest_path,
)
self.manifest_file = self.manifest_path / self.staticfiles_storage.manifest_name
# Manifest without paths.
self.manifest = {"version": self.staticfiles_storage.manifest_version}
with self.manifest_file.open("w") as manifest_file:
json.dump(self.manifest, manifest_file)
def test_read_manifest(self):
self.assertEqual(
self.staticfiles_storage.read_manifest(),
json.dumps(self.manifest),
)
def test_read_manifest_nonexistent(self):
os.remove(self.manifest_file)
self.assertIsNone(self.staticfiles_storage.read_manifest())
def test_save_manifest_override(self):
self.assertIs(self.manifest_file.exists(), True)
self.staticfiles_storage.save_manifest()
self.assertIs(self.manifest_file.exists(), True)
new_manifest = json.loads(self.staticfiles_storage.read_manifest())
self.assertIn("paths", new_manifest)
self.assertNotEqual(new_manifest, self.manifest)
def test_save_manifest_create(self):
os.remove(self.manifest_file)
self.staticfiles_storage.save_manifest()
self.assertIs(self.manifest_file.exists(), True)
new_manifest = json.loads(self.staticfiles_storage.read_manifest())
self.assertIn("paths", new_manifest)
self.assertNotEqual(new_manifest, self.manifest)
class CustomStaticFilesStorage(storage.StaticFilesStorage):
"""
Used in TestStaticFilePermissions
"""
def __init__(self, *args, **kwargs):
kwargs["file_permissions_mode"] = 0o640
kwargs["directory_permissions_mode"] = 0o740
super().__init__(*args, **kwargs)
@unittest.skipIf(sys.platform == "win32", "Windows only partially supports chmod.")
class TestStaticFilePermissions(CollectionTestCase):
command_params = {
"interactive": False,
"verbosity": 0,
"ignore_patterns": ["*.ignoreme"],
}
def setUp(self):
self.umask = 0o027
self.old_umask = os.umask(self.umask)
super().setUp()
def tearDown(self):
os.umask(self.old_umask)
super().tearDown()
# Don't run collectstatic command in this test class.
def run_collectstatic(self, **kwargs):
pass
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
)
def test_collect_static_files_permissions(self):
call_command("collectstatic", **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / "test.txt"
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o655)
tests = [
static_root / "subdir",
static_root / "nested",
static_root / "nested" / "css",
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o765)
@override_settings(
FILE_UPLOAD_PERMISSIONS=None,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=None,
)
def test_collect_static_files_default_permissions(self):
call_command("collectstatic", **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / "test.txt"
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o666 & ~self.umask)
tests = [
static_root / "subdir",
static_root / "nested",
static_root / "nested" / "css",
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o777 & ~self.umask)
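# Worked out for the umask set in setUp() (0o027): files default to
# 0o666 & ~0o027 == 0o640 and directories to 0o777 & ~0o027 == 0o750.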
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
STATICFILES_STORAGE="staticfiles_tests.test_storage.CustomStaticFilesStorage",
)
def test_collect_static_files_subclass_of_static_storage(self):
call_command("collectstatic", **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / "test.txt"
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o640)
tests = [
static_root / "subdir",
static_root / "nested",
static_root / "nested" / "css",
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o740)
@override_settings(
STATICFILES_STORAGE="django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
)
class TestCollectionHashedFilesCache(CollectionTestCase):
"""
Files referenced from CSS use the correct final hashed name regardless of
the order in which the files are post-processed.
"""
hashed_file_path = hashed_file_path
def setUp(self):
super().setUp()
self._temp_dir = temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, "test"))
self.addCleanup(shutil.rmtree, temp_dir)
def _get_filename_path(self, filename):
return os.path.join(self._temp_dir, "test", filename)
def test_file_change_after_collectstatic(self):
# Create initial static files.
file_contents = (
("foo.png", "foo"),
("bar.css", 'url("foo.png")\nurl("xyz.png")'),
("xyz.png", "xyz"),
)
for filename, content in file_contents:
with open(self._get_filename_path(filename), "w") as f:
f.write(content)
with self.modify_settings(STATICFILES_DIRS={"append": self._temp_dir}):
finders.get_finder.cache_clear()
err = StringIO()
# First collectstatic run.
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path("test/bar.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"foo.acbd18db4cc2.png", content)
self.assertIn(b"xyz.d16fb36f0911.png", content)
# Change the contents of the png files.
for filename in ("foo.png", "xyz.png"):
with open(self._get_filename_path(filename), "w+b") as f:
f.write(b"new content of file to change its hash")
# The hashes of the png files in the CSS file are updated after
# a second collectstatic.
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path("test/bar.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"foo.57a5cb9ba68d.png", content)
self.assertIn(b"xyz.57a5cb9ba68d.png", content)
|
93e42312b8ea9faf58d5e28982c6558033e3f478dea988955abb03c1e9533de6 | """
Regression tests for the Test Client, especially the customized assertions.
"""
import itertools
import os
from django.contrib.auth.models import User
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.http import HttpResponse
from django.template import Context, RequestContext, TemplateSyntaxError, engines
from django.template.response import SimpleTemplateResponse
from django.test import (
Client,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.test.client import RedirectCycleError, RequestFactory, encode_file
from django.test.utils import ContextList
from django.urls import NoReverseMatch, reverse
from django.utils.translation import gettext_lazy
from .models import CustomUser
from .views import CustomTestException
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username="testclient", password="password")
cls.staff = User.objects.create_user(
username="staff", password="password", is_staff=True
)
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class AssertContainsTests(SimpleTestCase):
def test_contains(self):
"Responses can be inspected for content, including counting repeated substrings"
response = self.client.get("/no_template_view/")
self.assertNotContains(response, "never")
self.assertContains(response, "never", 0)
self.assertContains(response, "once")
self.assertContains(response, "once", 1)
self.assertContains(response, "twice")
self.assertContains(response, "twice", 2)
try:
self.assertContains(response, "text", status_code=999)
except AssertionError as e:
self.assertIn(
"Couldn't retrieve content: Response code was 200 (expected 999)",
str(e),
)
try:
self.assertContains(response, "text", status_code=999, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Couldn't retrieve content: Response code was 200 (expected 999)",
str(e),
)
try:
self.assertNotContains(response, "text", status_code=999)
except AssertionError as e:
self.assertIn(
"Couldn't retrieve content: Response code was 200 (expected 999)",
str(e),
)
try:
self.assertNotContains(response, "text", status_code=999, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Couldn't retrieve content: Response code was 200 (expected 999)",
str(e),
)
try:
self.assertNotContains(response, "once")
except AssertionError as e:
self.assertIn("Response should not contain 'once'", str(e))
try:
self.assertNotContains(response, "once", msg_prefix="abc")
except AssertionError as e:
self.assertIn("abc: Response should not contain 'once'", str(e))
try:
self.assertContains(response, "never", 1)
except AssertionError as e:
self.assertIn(
"Found 0 instances of 'never' in response (expected 1)", str(e)
)
try:
self.assertContains(response, "never", 1, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Found 0 instances of 'never' in response (expected 1)", str(e)
)
try:
self.assertContains(response, "once", 0)
except AssertionError as e:
self.assertIn(
"Found 1 instances of 'once' in response (expected 0)", str(e)
)
try:
self.assertContains(response, "once", 0, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Found 1 instances of 'once' in response (expected 0)", str(e)
)
try:
self.assertContains(response, "once", 2)
except AssertionError as e:
self.assertIn(
"Found 1 instances of 'once' in response (expected 2)", str(e)
)
try:
self.assertContains(response, "once", 2, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Found 1 instances of 'once' in response (expected 2)", str(e)
)
try:
self.assertContains(response, "twice", 1)
except AssertionError as e:
self.assertIn(
"Found 2 instances of 'twice' in response (expected 1)", str(e)
)
try:
self.assertContains(response, "twice", 1, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Found 2 instances of 'twice' in response (expected 1)", str(e)
)
try:
self.assertContains(response, "thrice")
except AssertionError as e:
self.assertIn("Couldn't find 'thrice' in response", str(e))
try:
self.assertContains(response, "thrice", msg_prefix="abc")
except AssertionError as e:
self.assertIn("abc: Couldn't find 'thrice' in response", str(e))
try:
self.assertContains(response, "thrice", 3)
except AssertionError as e:
self.assertIn(
"Found 0 instances of 'thrice' in response (expected 3)", str(e)
)
try:
self.assertContains(response, "thrice", 3, msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Found 0 instances of 'thrice' in response (expected 3)", str(e)
)
def test_unicode_contains(self):
"Unicode characters can be found in template context"
# Regression test for #10183
r = self.client.get("/check_unicode/")
self.assertContains(r, "さかき")
self.assertContains(r, b"\xe5\xb3\xa0".decode())
def test_unicode_not_contains(self):
"Unicode characters can be searched for, and not found in template context"
# Regression test for #10183
r = self.client.get("/check_unicode/")
self.assertNotContains(r, "はたけ")
self.assertNotContains(r, b"\xe3\x81\xaf\xe3\x81\x9f\xe3\x81\x91".decode())
def test_binary_contains(self):
r = self.client.get("/check_binary/")
self.assertContains(r, b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e")
with self.assertRaises(AssertionError):
self.assertContains(r, b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e", count=2)
def test_binary_not_contains(self):
r = self.client.get("/check_binary/")
self.assertNotContains(r, b"%ODF-1.4\r\n%\x93\x8c\x8b\x9e")
with self.assertRaises(AssertionError):
self.assertNotContains(r, b"%PDF-1.4\r\n%\x93\x8c\x8b\x9e")
def test_nontext_contains(self):
r = self.client.get("/no_template_view/")
self.assertContains(r, gettext_lazy("once"))
def test_nontext_not_contains(self):
r = self.client.get("/no_template_view/")
self.assertNotContains(r, gettext_lazy("never"))
def test_assert_contains_renders_template_response(self):
"""
An unrendered SimpleTemplateResponse may be used in assertContains().
"""
template = engines["django"].from_string("Hello")
response = SimpleTemplateResponse(template)
self.assertContains(response, "Hello")
def test_assert_contains_using_non_template_response(self):
"""auto-rendering does not affect responses that aren't
instances (or subclasses) of SimpleTemplateResponse.
Refs #15826.
"""
response = HttpResponse("Hello")
self.assertContains(response, "Hello")
def test_assert_not_contains_renders_template_response(self):
"""
An unrendered SimpleTemplateResponse may be used in assertNotContains().
"""
template = engines["django"].from_string("Hello")
response = SimpleTemplateResponse(template)
self.assertNotContains(response, "Bye")
def test_assert_not_contains_using_non_template_response(self):
"""
auto-rendering does not affect responses that aren't instances (or
subclasses) of SimpleTemplateResponse.
"""
response = HttpResponse("Hello")
self.assertNotContains(response, "Bye")
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class AssertTemplateUsedTests(TestDataMixin, TestCase):
def test_no_context(self):
"Template usage assertions work then templates aren't in use"
response = self.client.get("/no_template_view/")
# The no template case doesn't mess with the template assertions
self.assertTemplateNotUsed(response, "GET Template")
try:
self.assertTemplateUsed(response, "GET Template")
except AssertionError as e:
self.assertIn("No templates used to render the response", str(e))
try:
self.assertTemplateUsed(response, "GET Template", msg_prefix="abc")
except AssertionError as e:
self.assertIn("abc: No templates used to render the response", str(e))
msg = "No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
self.assertTemplateUsed(response, "GET Template", count=2)
def test_single_context(self):
"Template assertions work when there is a single context"
response = self.client.get("/post_view/", {})
msg = (
": Template 'Empty GET Template' was used unexpectedly in "
"rendering the response"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertTemplateNotUsed(response, "Empty GET Template")
with self.assertRaisesMessage(AssertionError, "abc" + msg):
self.assertTemplateNotUsed(response, "Empty GET Template", msg_prefix="abc")
msg = (
": Template 'Empty POST Template' was not a template used to "
"render the response. Actual template(s) used: Empty GET Template"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertTemplateUsed(response, "Empty POST Template")
with self.assertRaisesMessage(AssertionError, "abc" + msg):
self.assertTemplateUsed(response, "Empty POST Template", msg_prefix="abc")
msg = (
": Template 'Empty GET Template' was expected to be rendered 2 "
"time(s) but was actually rendered 1 time(s)."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertTemplateUsed(response, "Empty GET Template", count=2)
with self.assertRaisesMessage(AssertionError, "abc" + msg):
self.assertTemplateUsed(
response, "Empty GET Template", msg_prefix="abc", count=2
)
def test_multiple_context(self):
"Template assertions work when there are multiple contexts"
post_data = {
"text": "Hello World",
"email": "[email protected]",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data OK")
msg = "Template '%s' was used unexpectedly in rendering the response"
with self.assertRaisesMessage(AssertionError, msg % "form_view.html"):
self.assertTemplateNotUsed(response, "form_view.html")
with self.assertRaisesMessage(AssertionError, msg % "base.html"):
self.assertTemplateNotUsed(response, "base.html")
msg = (
"Template 'Valid POST Template' was not a template used to render "
"the response. Actual template(s) used: form_view.html, base.html"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertTemplateUsed(response, "Valid POST Template")
msg = (
"Template 'base.html' was expected to be rendered 2 time(s) but "
"was actually rendered 1 time(s)."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertTemplateUsed(response, "base.html", count=2)
def test_template_rendered_multiple_times(self):
"""Template assertions work when a template is rendered multiple times."""
response = self.client.get("/render_template_multiple_times/")
self.assertTemplateUsed(response, "base.html", count=2)
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class AssertRedirectsTests(SimpleTestCase):
def test_redirect_page(self):
"An assertion is raised if the original page couldn't be retrieved as expected"
# This page will redirect with code 301, not 302
response = self.client.get("/permanent_redirect_view/")
try:
self.assertRedirects(response, "/get_view/")
except AssertionError as e:
self.assertIn(
"Response didn't redirect as expected: Response code was 301 "
"(expected 302)",
str(e),
)
try:
self.assertRedirects(response, "/get_view/", msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Response didn't redirect as expected: Response code was 301 "
"(expected 302)",
str(e),
)
def test_lost_query(self):
"""
An assertion is raised if the redirect location doesn't preserve GET
parameters.
"""
response = self.client.get("/redirect_view/", {"var": "value"})
try:
self.assertRedirects(response, "/get_view/")
except AssertionError as e:
self.assertIn(
"Response redirected to '/get_view/?var=value', expected '/get_view/'",
str(e),
)
try:
self.assertRedirects(response, "/get_view/", msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Response redirected to '/get_view/?var=value', expected "
"'/get_view/'",
str(e),
)
def test_incorrect_target(self):
"An assertion is raised if the response redirects to another target"
response = self.client.get("/permanent_redirect_view/")
try:
# Should redirect to get_view
self.assertRedirects(response, "/some_view/")
except AssertionError as e:
self.assertIn(
"Response didn't redirect as expected: Response code was 301 "
"(expected 302)",
str(e),
)
def test_target_page(self):
"""
An assertion is raised if the response redirect target cannot be
retrieved as expected.
"""
response = self.client.get("/double_redirect_view/")
try:
# The redirect target responds with a 301 code, not 200
self.assertRedirects(response, "http://testserver/permanent_redirect_view/")
except AssertionError as e:
self.assertIn(
"Couldn't retrieve redirection page '/permanent_redirect_view/': "
"response code was 301 (expected 200)",
str(e),
)
try:
# The redirect target responds with a 301 code, not 200
self.assertRedirects(
response, "http://testserver/permanent_redirect_view/", msg_prefix="abc"
)
except AssertionError as e:
self.assertIn(
"abc: Couldn't retrieve redirection page '/permanent_redirect_view/': "
"response code was 301 (expected 200)",
str(e),
)
def test_redirect_chain(self):
"You can follow a redirect chain of multiple redirects"
response = self.client.get("/redirects/further/more/", {}, follow=True)
self.assertRedirects(
response, "/no_template_view/", status_code=302, target_status_code=200
)
self.assertEqual(len(response.redirect_chain), 1)
self.assertEqual(response.redirect_chain[0], ("/no_template_view/", 302))
def test_multiple_redirect_chain(self):
"You can follow a redirect chain of multiple redirects"
response = self.client.get("/redirects/", {}, follow=True)
self.assertRedirects(
response, "/no_template_view/", status_code=302, target_status_code=200
)
self.assertEqual(len(response.redirect_chain), 3)
self.assertEqual(response.redirect_chain[0], ("/redirects/further/", 302))
self.assertEqual(response.redirect_chain[1], ("/redirects/further/more/", 302))
self.assertEqual(response.redirect_chain[2], ("/no_template_view/", 302))
def test_redirect_chain_to_non_existent(self):
"You can follow a chain to a nonexistent view."
response = self.client.get("/redirect_to_non_existent_view2/", {}, follow=True)
self.assertRedirects(
response, "/non_existent_view/", status_code=302, target_status_code=404
)
def test_redirect_chain_to_self(self):
"Redirections to self are caught and escaped"
with self.assertRaises(RedirectCycleError) as context:
self.client.get("/redirect_to_self/", {}, follow=True)
response = context.exception.last_response
# The chain of redirects stops once the cycle is detected.
self.assertRedirects(
response, "/redirect_to_self/", status_code=302, target_status_code=302
)
self.assertEqual(len(response.redirect_chain), 2)
def test_redirect_to_self_with_changing_query(self):
"Redirections don't loop forever even if query is changing"
with self.assertRaises(RedirectCycleError):
self.client.get(
"/redirect_to_self_with_changing_query_view/",
{"counter": "0"},
follow=True,
)
def test_circular_redirect(self):
"Circular redirect chains are caught and escaped"
with self.assertRaises(RedirectCycleError) as context:
self.client.get("/circular_redirect_1/", {}, follow=True)
response = context.exception.last_response
# The chain of redirects will get back to the starting point, but stop there.
self.assertRedirects(
response, "/circular_redirect_2/", status_code=302, target_status_code=302
)
self.assertEqual(len(response.redirect_chain), 4)
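# The four redirect_chain entries presumably trace 1 -> 2 -> 3 -> 1 -> 2;
# once /circular_redirect_2/ shows up a second time, the cycle is
# detected and RedirectCycleError is raised with the last response.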
def test_redirect_chain_post(self):
"A redirect chain will be followed from an initial POST post"
response = self.client.post("/redirects/", {"nothing": "to_send"}, follow=True)
self.assertRedirects(response, "/no_template_view/", 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_head(self):
"A redirect chain will be followed from an initial HEAD request"
response = self.client.head("/redirects/", {"nothing": "to_send"}, follow=True)
self.assertRedirects(response, "/no_template_view/", 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_options(self):
"A redirect chain will be followed from an initial OPTIONS request"
response = self.client.options("/redirects/", follow=True)
self.assertRedirects(response, "/no_template_view/", 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_put(self):
"A redirect chain will be followed from an initial PUT request"
response = self.client.put("/redirects/", follow=True)
self.assertRedirects(response, "/no_template_view/", 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_delete(self):
"A redirect chain will be followed from an initial DELETE request"
response = self.client.delete("/redirects/", follow=True)
self.assertRedirects(response, "/no_template_view/", 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
@modify_settings(ALLOWED_HOSTS={"append": "otherserver"})
def test_redirect_to_different_host(self):
"The test client will preserve scheme, host and port changes"
response = self.client.get("/redirect_other_host/", follow=True)
self.assertRedirects(
response,
"https://otherserver:8443/no_template_view/",
status_code=302,
target_status_code=200,
)
# We can't use is_secure() or get_host()
# because response.request is a dictionary, not an HttpRequest
self.assertEqual(response.request.get("wsgi.url_scheme"), "https")
self.assertEqual(response.request.get("SERVER_NAME"), "otherserver")
self.assertEqual(response.request.get("SERVER_PORT"), "8443")
# assertRedirects() can follow redirect to 'otherserver' too.
response = self.client.get("/redirect_other_host/", follow=False)
self.assertRedirects(
response,
"https://otherserver:8443/no_template_view/",
status_code=302,
target_status_code=200,
)
def test_redirect_chain_on_non_redirect_page(self):
"""
An assertion is raised if the original page couldn't be retrieved as
expected.
"""
# This page returns a 200 response and doesn't redirect at all.
response = self.client.get("/get_view/", follow=True)
try:
self.assertRedirects(response, "/get_view/")
except AssertionError as e:
self.assertIn(
"Response didn't redirect as expected: Response code was 200 "
"(expected 302)",
str(e),
)
try:
self.assertRedirects(response, "/get_view/", msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Response didn't redirect as expected: Response code was 200 "
"(expected 302)",
str(e),
)
def test_redirect_on_non_redirect_page(self):
"An assertion is raised if the original page couldn't be retrieved as expected"
# This page returns a 200 response and doesn't redirect at all.
response = self.client.get("/get_view/")
try:
self.assertRedirects(response, "/get_view/")
except AssertionError as e:
self.assertIn(
"Response didn't redirect as expected: Response code was 200 "
"(expected 302)",
str(e),
)
try:
self.assertRedirects(response, "/get_view/", msg_prefix="abc")
except AssertionError as e:
self.assertIn(
"abc: Response didn't redirect as expected: Response code was 200 "
"(expected 302)",
str(e),
)
def test_redirect_scheme(self):
"""
An assertion is raised if the response doesn't have the scheme
specified in expected_url.
"""
# For all possible True/False combinations of follow and secure
for follow, secure in itertools.product([True, False], repeat=2):
# always redirects to https
response = self.client.get(
"/https_redirect_view/", follow=follow, secure=secure
)
# the goal scheme is https
self.assertRedirects(
response, "https://testserver/secure_view/", status_code=302
)
with self.assertRaises(AssertionError):
self.assertRedirects(
response, "http://testserver/secure_view/", status_code=302
)
def test_redirect_fetch_redirect_response(self):
"""Preserve extra headers of requests made with django.test.Client."""
methods = (
"get",
"post",
"head",
"options",
"put",
"patch",
"delete",
"trace",
)
for method in methods:
with self.subTest(method=method):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_based_on_extra_headers_1/",
follow=False,
HTTP_REDIRECT="val",
)
self.assertRedirects(
response,
"/redirect_based_on_extra_headers_2/",
fetch_redirect_response=True,
status_code=302,
target_status_code=302,
)
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class LoginTests(TestDataMixin, TestCase):
def test_login_different_client(self):
"Using a different test client doesn't violate authentication"
# Create a second client, and log in.
c = Client()
login = c.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Get a redirection page with the second client.
response = c.get("/login_protected_redirect_view/")
# At this point, self.client isn't logged in.
# assertRedirects uses the original client, not the default client.
self.assertRedirects(response, "/get_view/")
@override_settings(
SESSION_ENGINE="test_client_regress.session",
ROOT_URLCONF="test_client_regress.urls",
)
class SessionEngineTests(TestDataMixin, TestCase):
def test_login(self):
"A session engine that modifies the session key can be used to log in"
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Try to access a login protected page.
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
@override_settings(
ROOT_URLCONF="test_client_regress.urls",
)
class URLEscapingTests(SimpleTestCase):
def test_simple_argument_get(self):
"Get a view that has a simple string argument"
response = self.client.get(reverse("arg_view", args=["Slartibartfast"]))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Howdy, Slartibartfast")
def test_argument_with_space_get(self):
"Get a view that has a string argument that requires escaping"
response = self.client.get(reverse("arg_view", args=["Arthur Dent"]))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Hi, Arthur")
def test_simple_argument_post(self):
"Post for a view that has a simple string argument"
response = self.client.post(reverse("arg_view", args=["Slartibartfast"]))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Howdy, Slartibartfast")
def test_argument_with_space_post(self):
"Post for a view that has a string argument that requires escaping"
response = self.client.post(reverse("arg_view", args=["Arthur Dent"]))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"Hi, Arthur")
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class ExceptionTests(TestDataMixin, TestCase):
def test_exception_cleared(self):
"#5836 - A stale user exception isn't re-raised by the test client."
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
with self.assertRaises(CustomTestException):
self.client.get("/staff_only/")
# At this point, an exception has been raised, and should be cleared.
# This next operation should be successful; if it isn't we have a problem.
login = self.client.login(username="staff", password="password")
self.assertTrue(login, "Could not log in")
self.client.get("/staff_only/")
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class TemplateExceptionTests(SimpleTestCase):
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "bad_templates")],
}
]
)
def test_bad_404_template(self):
"Errors found when rendering 404 error templates are re-raised"
with self.assertRaises(TemplateSyntaxError):
self.client.get("/no_such_view/")
# We need two different tests to check URLconf substitution - one to check
# it was changed, and another one (without self.urls) to check it was reverted on
# teardown. This pair of tests relies upon the alphabetical ordering of test execution.
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class UrlconfSubstitutionTests(SimpleTestCase):
def test_urlconf_was_changed(self):
"TestCase can enforce a custom URLconf on a per-test basis"
url = reverse("arg_view", args=["somename"])
self.assertEqual(url, "/arg_view/somename/")
# This test needs to run *after* UrlconfSubstitutionTests; the zz prefix in the
# name is to ensure alphabetical ordering.
class zzUrlconfSubstitutionTests(SimpleTestCase):
def test_urlconf_was_reverted(self):
"""URLconf is reverted to original value after modification in a TestCase
This will not find a match as the default ROOT_URLCONF is empty.
"""
with self.assertRaises(NoReverseMatch):
reverse("arg_view", args=["somename"])
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class ContextTests(TestDataMixin, TestCase):
def test_single_context(self):
"Context variables can be retrieved from a single context"
response = self.client.get("/request_data/", data={"foo": "whiz"})
self.assertIsInstance(response.context, RequestContext)
self.assertIn("get-foo", response.context)
self.assertEqual(response.context["get-foo"], "whiz")
self.assertEqual(response.context["data"], "sausage")
with self.assertRaisesMessage(KeyError, "does-not-exist"):
response.context["does-not-exist"]
def test_inherited_context(self):
"Context variables can be retrieved from a list of contexts"
response = self.client.get("/request_data_extended/", data={"foo": "whiz"})
self.assertEqual(response.context.__class__, ContextList)
self.assertEqual(len(response.context), 2)
self.assertIn("get-foo", response.context)
self.assertEqual(response.context["get-foo"], "whiz")
self.assertEqual(response.context["data"], "bacon")
with self.assertRaisesMessage(KeyError, "does-not-exist"):
response.context["does-not-exist"]
def test_contextlist_keys(self):
c1 = Context()
c1.update({"hello": "world", "goodbye": "john"})
c1.update({"hello": "dolly", "dolly": "parton"})
c2 = Context()
c2.update({"goodbye": "world", "python": "rocks"})
c2.update({"goodbye": "dolly"})
k = ContextList([c1, c2])
# None, True and False are builtins of BaseContext, and present
# in every Context without needing to be added.
self.assertEqual(
{"None", "True", "False", "hello", "goodbye", "python", "dolly"}, k.keys()
)
def test_contextlist_get(self):
c1 = Context({"hello": "world", "goodbye": "john"})
c2 = Context({"goodbye": "world", "python": "rocks"})
k = ContextList([c1, c2])
self.assertEqual(k.get("hello"), "world")
self.assertEqual(k.get("goodbye"), "john")
self.assertEqual(k.get("python"), "rocks")
self.assertEqual(k.get("nonexistent", "default"), "default")
def test_15368(self):
# Need to insert a context processor that assumes certain things about
# the request instance. This triggers a bug caused by some ways of
# copying RequestContext.
with self.settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"test_client_regress.context_processors.special",
],
},
}
]
):
response = self.client.get("/request_context_view/")
self.assertContains(response, "Path: /request_context_view/")
def test_nested_requests(self):
"""
response.context is not lost when a view calls another view.
"""
response = self.client.get("/nested_view/")
self.assertIsInstance(response.context, RequestContext)
self.assertEqual(response.context["nested"], "yes")
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class SessionTests(TestDataMixin, TestCase):
def test_session(self):
"The session isn't lost if a user logs in"
# The session doesn't exist to start.
response = self.client.get("/check_session/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"NO")
# This request sets a session variable.
response = self.client.get("/set_session/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"set_session")
# The session has been modified
response = self.client.get("/check_session/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"YES")
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Session should still contain the modified value
response = self.client.get("/check_session/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"YES")
def test_session_initiated(self):
session = self.client.session
session["session_var"] = "foo"
session.save()
response = self.client.get("/check_session/")
self.assertEqual(response.content, b"foo")
def test_logout(self):
"""Logout should work whether the user is logged in or not (#9978)."""
self.client.logout()
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
self.client.logout()
self.client.logout()
def test_logout_with_user(self):
"""Logout should send user_logged_out signal if user was logged in."""
def listener(*args, **kwargs):
listener.executed = True
self.assertEqual(kwargs["sender"], User)
listener.executed = False
user_logged_out.connect(listener)
self.client.login(username="testclient", password="password")
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
@override_settings(AUTH_USER_MODEL="test_client_regress.CustomUser")
def test_logout_with_custom_user(self):
"""Logout should send user_logged_out signal if custom user was logged in."""
def listener(*args, **kwargs):
self.assertEqual(kwargs["sender"], CustomUser)
listener.executed = True
listener.executed = False
u = CustomUser.custom_objects.create(email="[email protected]")
u.set_password("password")
u.save()
user_logged_out.connect(listener)
self.client.login(username="[email protected]", password="password")
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
@override_settings(
AUTHENTICATION_BACKENDS=(
"django.contrib.auth.backends.ModelBackend",
"test_client_regress.auth_backends.CustomUserBackend",
)
)
def test_logout_with_custom_auth_backend(self):
"Request a logout after logging in with custom authentication backend"
def listener(*args, **kwargs):
self.assertEqual(kwargs["sender"], CustomUser)
listener.executed = True
listener.executed = False
u = CustomUser.custom_objects.create(email="[email protected]")
u.set_password("password")
u.save()
user_logged_out.connect(listener)
self.client.login(username="[email protected]", password="password")
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
def test_logout_without_user(self):
"""Logout should send signal even if user not authenticated."""
def listener(user, *args, **kwargs):
listener.user = user
listener.executed = True
listener.executed = False
user_logged_out.connect(listener)
self.client.login(username="incorrect", password="password")
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
self.assertIsNone(listener.user)
def test_login_with_user(self):
"""Login should send user_logged_in signal on successful login."""
def listener(*args, **kwargs):
listener.executed = True
listener.executed = False
user_logged_in.connect(listener)
self.client.login(username="testclient", password="password")
user_logged_in.disconnect(listener)
self.assertTrue(listener.executed)
def test_login_without_signal(self):
"""Login shouldn't send signal if user wasn't logged in"""
def listener(*args, **kwargs):
listener.executed = True
listener.executed = False
user_logged_in.connect(listener)
self.client.login(username="incorrect", password="password")
user_logged_in.disconnect(listener)
self.assertFalse(listener.executed)
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class RequestMethodTests(SimpleTestCase):
def test_get(self):
"Request a view via request method GET"
response = self.client.get("/request_methods/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: GET")
def test_post(self):
"Request a view via request method POST"
response = self.client.post("/request_methods/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: POST")
def test_head(self):
"Request a view via request method HEAD"
response = self.client.head("/request_methods/")
self.assertEqual(response.status_code, 200)
# A HEAD request doesn't return any content.
self.assertNotEqual(response.content, b"request method: HEAD")
self.assertEqual(response.content, b"")
def test_options(self):
"Request a view via request method OPTIONS"
response = self.client.options("/request_methods/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: OPTIONS")
def test_put(self):
"Request a view via request method PUT"
response = self.client.put("/request_methods/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: PUT")
def test_delete(self):
"Request a view via request method DELETE"
response = self.client.delete("/request_methods/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: DELETE")
def test_patch(self):
"Request a view via request method PATCH"
response = self.client.patch("/request_methods/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: PATCH")
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class RequestMethodStringDataTests(SimpleTestCase):
def test_post(self):
"Request a view with string data via request method POST"
# Regression test for #11371
data = '{"test": "json"}'
response = self.client.post(
"/request_methods/", data=data, content_type="application/json"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: POST")
def test_put(self):
"Request a view with string data via request method PUT"
# Regression test for #11371
data = '{"test": "json"}'
response = self.client.put(
"/request_methods/", data=data, content_type="application/json"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: PUT")
def test_patch(self):
"Request a view with string data via request method PATCH"
# Regression test for #17797
data = '{"test": "json"}'
response = self.client.patch(
"/request_methods/", data=data, content_type="application/json"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"request method: PATCH")
def test_empty_string_data(self):
"Request a view with empty string data via request method GET/POST/HEAD"
# Regression test for #21740
response = self.client.get("/body/", data="", content_type="application/json")
self.assertEqual(response.content, b"")
response = self.client.post("/body/", data="", content_type="application/json")
self.assertEqual(response.content, b"")
response = self.client.head("/body/", data="", content_type="application/json")
self.assertEqual(response.content, b"")
def test_json_bytes(self):
response = self.client.post(
"/body/", data=b"{'value': 37}", content_type="application/json"
)
self.assertEqual(response.content, b"{'value': 37}")
def test_json(self):
response = self.client.get("/json_response/")
self.assertEqual(response.json(), {"key": "value"})
def test_json_charset(self):
response = self.client.get("/json_response_latin1/")
self.assertEqual(response.charset, "latin1")
self.assertEqual(response.json(), {"a": "Å"})
def test_json_structured_suffixes(self):
valid_types = (
"application/vnd.api+json",
"application/vnd.api.foo+json",
"application/json; charset=utf-8",
"application/activity+json",
"application/activity+json; charset=utf-8",
)
for content_type in valid_types:
response = self.client.get(
"/json_response/", {"content_type": content_type}
)
self.assertEqual(response.headers["Content-Type"], content_type)
self.assertEqual(response.json(), {"key": "value"})
def test_json_multiple_access(self):
response = self.client.get("/json_response/")
self.assertIs(response.json(), response.json())
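# assertIs (rather than assertEqual) shows that json() parses the payload
# once and caches the result, returning the same object on later calls.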
def test_json_wrong_header(self):
response = self.client.get("/body/")
msg = (
'Content-Type header is "text/html; charset=utf-8", not "application/json"'
)
with self.assertRaisesMessage(ValueError, msg):
self.assertEqual(response.json(), {"key": "value"})
@override_settings(
ROOT_URLCONF="test_client_regress.urls",
)
class QueryStringTests(SimpleTestCase):
def test_get_like_requests(self):
for method_name in ("get", "head"):
# A GET-like request can pass a query string as data (#10571)
method = getattr(self.client, method_name)
response = method("/request_data/", data={"foo": "whiz"})
self.assertEqual(response.context["get-foo"], "whiz")
# A GET-like request can pass a query string as part of the URL
response = method("/request_data/?foo=whiz")
self.assertEqual(response.context["get-foo"], "whiz")
# Data provided in the URL to a GET-like request is overridden by
# actual form data.
response = method("/request_data/?foo=whiz", data={"foo": "bang"})
self.assertEqual(response.context["get-foo"], "bang")
response = method("/request_data/?foo=whiz", data={"bar": "bang"})
self.assertIsNone(response.context["get-foo"])
self.assertEqual(response.context["get-bar"], "bang")
def test_post_like_requests(self):
# A POST-like request can pass a query string as data
response = self.client.post("/request_data/", data={"foo": "whiz"})
self.assertIsNone(response.context["get-foo"])
self.assertEqual(response.context["post-foo"], "whiz")
# A POST-like request can pass a query string as part of the URL
response = self.client.post("/request_data/?foo=whiz")
self.assertEqual(response.context["get-foo"], "whiz")
self.assertIsNone(response.context["post-foo"])
# POST data provided in the URL augments actual form data
response = self.client.post("/request_data/?foo=whiz", data={"foo": "bang"})
self.assertEqual(response.context["get-foo"], "whiz")
self.assertEqual(response.context["post-foo"], "bang")
response = self.client.post("/request_data/?foo=whiz", data={"bar": "bang"})
self.assertEqual(response.context["get-foo"], "whiz")
self.assertIsNone(response.context["get-bar"])
self.assertIsNone(response.context["post-foo"])
self.assertEqual(response.context["post-bar"], "bang")
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class PayloadEncodingTests(SimpleTestCase):
"""Regression tests for #10571."""
def test_simple_payload(self):
"""A simple ASCII-only text can be POSTed."""
text = "English: mountain pass"
response = self.client.post(
"/parse_encoded_text/", text, content_type="text/plain"
)
self.assertEqual(response.content, text.encode())
def test_utf8_payload(self):
"""Non-ASCII data encoded as UTF-8 can be POSTed."""
text = "dog: собака"
response = self.client.post(
"/parse_encoded_text/", text, content_type="text/plain; charset=utf-8"
)
self.assertEqual(response.content, text.encode())
def test_utf16_payload(self):
"""Non-ASCII data encoded as UTF-16 can be POSTed."""
text = "dog: собака"
response = self.client.post(
"/parse_encoded_text/", text, content_type="text/plain; charset=utf-16"
)
self.assertEqual(response.content, text.encode("utf-16"))
def test_non_utf_payload(self):
"""Non-ASCII data as a non-UTF based encoding can be POSTed."""
text = "dog: собака"
response = self.client.post(
"/parse_encoded_text/", text, content_type="text/plain; charset=koi8-r"
)
self.assertEqual(response.content, text.encode("koi8-r"))
class DummyFile:
def __init__(self, filename):
self.name = filename
def read(self):
return b"TEST_FILE_CONTENT"
class UploadedFileEncodingTest(SimpleTestCase):
def test_file_encoding(self):
encoded_file = encode_file(
"TEST_BOUNDARY", "TEST_KEY", DummyFile("test_name.bin")
)
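# encode_file() returns the multipart/form-data lines for a single file
# field: the boundary marker first, the Content-Disposition header next,
# and the raw file content last, as the indexed assertions below check.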
self.assertEqual(b"--TEST_BOUNDARY", encoded_file[0])
self.assertEqual(
b'Content-Disposition: form-data; name="TEST_KEY"; '
b'filename="test_name.bin"',
encoded_file[1],
)
self.assertEqual(b"TEST_FILE_CONTENT", encoded_file[-1])
def test_guesses_content_type_on_file_encoding(self):
self.assertEqual(
b"Content-Type: application/octet-stream",
encode_file("IGNORE", "IGNORE", DummyFile("file.bin"))[2],
)
self.assertEqual(
b"Content-Type: text/plain",
encode_file("IGNORE", "IGNORE", DummyFile("file.txt"))[2],
)
self.assertIn(
encode_file("IGNORE", "IGNORE", DummyFile("file.zip"))[2],
(
b"Content-Type: application/x-compress",
b"Content-Type: application/x-zip",
b"Content-Type: application/x-zip-compressed",
b"Content-Type: application/zip",
),
)
self.assertEqual(
b"Content-Type: application/octet-stream",
encode_file("IGNORE", "IGNORE", DummyFile("file.unknown"))[2],
)
@override_settings(
ROOT_URLCONF="test_client_regress.urls",
)
class RequestHeadersTest(SimpleTestCase):
def test_client_headers(self):
"A test client can receive custom headers"
response = self.client.get(
"/check_headers/", headers={"x-arg-check": "Testing 123"}
)
self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123")
self.assertEqual(response.status_code, 200)
def test_client_headers_redirect(self):
"Test client headers are preserved through redirects"
response = self.client.get(
"/check_headers_redirect/",
follow=True,
headers={"x-arg-check": "Testing 123"},
)
self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123")
self.assertRedirects(
response, "/check_headers/", status_code=302, target_status_code=200
)
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class ReadLimitedStreamTest(SimpleTestCase):
"""
HttpRequest.body, HttpRequest.read(), and HttpRequest.read(BUFFER) have
proper LimitedStream behavior.
Refs #14753, #15785
"""
def test_body_from_empty_request(self):
"""HttpRequest.body on a test client GET request should return
the empty string."""
self.assertEqual(self.client.get("/body/").content, b"")
def test_read_from_empty_request(self):
"""HttpRequest.read() on a test client GET request should return the
empty string."""
self.assertEqual(self.client.get("/read_all/").content, b"")
def test_read_numbytes_from_empty_request(self):
"""HttpRequest.read(LARGE_BUFFER) on a test client GET request should
return the empty string."""
self.assertEqual(self.client.get("/read_buffer/").content, b"")
def test_read_from_nonempty_request(self):
"""HttpRequest.read() on a test client PUT request with some payload
should return that payload."""
payload = b"foobar"
self.assertEqual(
self.client.put(
"/read_all/", data=payload, content_type="text/plain"
).content,
payload,
)
def test_read_numbytes_from_nonempty_request(self):
"""HttpRequest.read(LARGE_BUFFER) on a test client PUT request with
some payload should return that payload."""
payload = b"foobar"
self.assertEqual(
self.client.put(
"/read_buffer/", data=payload, content_type="text/plain"
).content,
payload,
)
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class RequestFactoryStateTest(SimpleTestCase):
"""Regression tests for #15929."""
# These tests are checking that certain middleware don't change certain
# global state. Alternatively, from the point of view of a test, they are
# ensuring test isolation behavior. So, unusually, it doesn't make sense to
# run the tests individually, and if any are failing it is confusing to run
# them with any other set of tests.
def common_test_that_should_always_pass(self):
request = RequestFactory().get("/")
request.session = {}
self.assertFalse(hasattr(request, "user"))
def test_request(self):
self.common_test_that_should_always_pass()
def test_request_after_client(self):
# Apart from the next line, the three tests are identical.
self.client.get("/")
self.common_test_that_should_always_pass()
def test_request_after_client_2(self):
# This test is executed after the previous one
self.common_test_that_should_always_pass()
@override_settings(ROOT_URLCONF="test_client_regress.urls")
class RequestFactoryEnvironmentTests(SimpleTestCase):
"""
Regression tests for #8551 and #17067: ensure that environment variables
are set correctly in RequestFactory.
"""
def test_should_set_correct_env_variables(self):
request = RequestFactory().get("/path/")
self.assertEqual(request.META.get("REMOTE_ADDR"), "127.0.0.1")
self.assertEqual(request.META.get("SERVER_NAME"), "testserver")
self.assertEqual(request.META.get("SERVER_PORT"), "80")
self.assertEqual(request.META.get("SERVER_PROTOCOL"), "HTTP/1.1")
self.assertEqual(
request.META.get("SCRIPT_NAME") + request.META.get("PATH_INFO"), "/path/"
)
def test_cookies(self):
factory = RequestFactory()
factory.cookies.load('A="B"; C="D"; Path=/; Version=1')
request = factory.get("/")
self.assertEqual(request.META["HTTP_COOKIE"], 'A="B"; C="D"')
|
97b26532307c635f955c2aa6485b1c726f89092eb1509adea1975c4b9530cd74 | import sys
import threading
import time
from unittest import skipIf, skipUnless
from django.db import (
DatabaseError,
Error,
IntegrityError,
OperationalError,
connection,
transaction,
)
from django.test import (
TestCase,
TransactionTestCase,
skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import Reporter
@skipUnlessDBFeature("uses_savepoints")
class AtomicTests(TransactionTestCase):
"""
Tests for the atomic decorator and context manager.
The tests make assertions on internal attributes because there isn't a
robust way to ask the database for its current transaction state.
Since the decorator syntax is converted into a context manager (see the
implementation), there are only a few basic tests with the decorator
syntax and the bulk of the tests use the context manager syntax.
"""
available_apps = ["transactions"]
def test_decorator_syntax_commit(self):
@transaction.atomic
def make_reporter():
return Reporter.objects.create(first_name="Tintin")
reporter = make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_decorator_syntax_rollback(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_alternate_decorator_syntax_commit(self):
@transaction.atomic()
def make_reporter():
return Reporter.objects.create(first_name="Tintin")
reporter = make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_alternate_decorator_syntax_rollback(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_commit(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_nested_commit_commit(self):
with transaction.atomic():
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
reporter2 = Reporter.objects.create(
first_name="Archibald", last_name="Haddock"
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_commit_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_nested_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic():
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_nested_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_commit_commit(self):
with transaction.atomic():
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
reporter2 = Reporter.objects.create(
first_name="Archibald", last_name="Haddock"
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_merged_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
# Writes in the outer block are rolled back too.
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_reuse_commit_commit(self):
atomic = transaction.atomic()
with atomic:
reporter1 = Reporter.objects.create(first_name="Tintin")
with atomic:
reporter2 = Reporter.objects.create(
first_name="Archibald", last_name="Haddock"
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_reuse_commit_rollback(self):
atomic = transaction.atomic()
with atomic:
reporter = Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_reuse_rollback_commit(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with atomic:
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_reuse_rollback_rollback(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_force_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
# The atomic block wouldn't roll back on its own; force it to.
self.assertFalse(transaction.get_rollback())
transaction.set_rollback(True)
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_prevent_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
sid = transaction.savepoint()
# Trigger a database error inside an inner atomic block without a savepoint.
with self.assertRaises(DatabaseError):
with transaction.atomic(savepoint=False):
with connection.cursor() as cursor:
cursor.execute("SELECT no_such_col FROM transactions_reporter")
# Prevent atomic from rolling back since we're recovering manually.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
transaction.savepoint_rollback(sid)
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
@skipUnlessDBFeature("can_release_savepoints")
def test_failure_on_exit_transaction(self):
with transaction.atomic():
with self.assertRaises(DatabaseError):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
self.assertEqual(len(Reporter.objects.all()), 1)
# Incorrect savepoint id to provoke a database error.
connection.savepoint_ids.append("12")
with self.assertRaises(transaction.TransactionManagementError):
len(Reporter.objects.all())
self.assertIs(connection.needs_rollback, True)
if connection.savepoint_ids:
connection.savepoint_ids.pop()
self.assertSequenceEqual(Reporter.objects.all(), [])
class AtomicInsideTransactionTests(AtomicTests):
"""All basic tests for atomic should also pass within an existing transaction."""
def setUp(self):
self.atomic = transaction.atomic()
self.atomic.__enter__()
def tearDown(self):
self.atomic.__exit__(*sys.exc_info())
class AtomicWithoutAutocommitTests(AtomicTests):
"""All basic tests for atomic should also pass when autocommit is turned off."""
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
# The tests access the database after exercising 'atomic', initiating
# a transaction; a rollback is required before restoring autocommit.
transaction.rollback()
transaction.set_autocommit(True)
@skipUnlessDBFeature("uses_savepoints")
class AtomicMergeTests(TransactionTestCase):
"""Test merging transactions with savepoint=False."""
available_apps = ["transactions"]
def test_merged_outer_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
# The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The first block has a savepoint and must roll back.
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_inner_savepoint_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
# The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second block has a savepoint and must roll back.
self.assertEqual(Reporter.objects.count(), 1)
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
@skipUnlessDBFeature("uses_savepoints")
class AtomicErrorsTests(TransactionTestCase):
available_apps = ["transactions"]
forbidden_atomic_msg = "This is forbidden when an 'atomic' block is active."
def test_atomic_prevents_setting_autocommit(self):
autocommit = transaction.get_autocommit()
with transaction.atomic():
with self.assertRaisesMessage(
transaction.TransactionManagementError, self.forbidden_atomic_msg
):
transaction.set_autocommit(not autocommit)
# Make sure autocommit wasn't changed.
self.assertEqual(connection.autocommit, autocommit)
def test_atomic_prevents_calling_transaction_methods(self):
with transaction.atomic():
with self.assertRaisesMessage(
transaction.TransactionManagementError, self.forbidden_atomic_msg
):
transaction.commit()
with self.assertRaisesMessage(
transaction.TransactionManagementError, self.forbidden_atomic_msg
):
transaction.rollback()
def test_atomic_prevents_queries_in_broken_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# The transaction is marked as needing rollback.
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(
transaction.TransactionManagementError, msg
) as cm:
r2.save(force_update=True)
self.assertIsInstance(cm.exception.__cause__, IntegrityError)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock")
@skipIfDBFeature("atomic_transactions")
def test_atomic_allows_queries_after_fixing_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# Mark the transaction as no longer needing rollback.
transaction.set_rollback(False)
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus")
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self):
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
connection.close()
# The connection is closed and the transaction is marked as
# needing rollback. This will raise an InterfaceError on databases
# that refuse to create cursors on closed connections (PostgreSQL)
# and a TransactionManagementError on other databases.
with self.assertRaises(Error):
Reporter.objects.create(first_name="Cuthbert", last_name="Calculus")
# The connection is usable again.
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature("uses_savepoints")
@skipUnless(connection.vendor == "mysql", "MySQL-specific behaviors")
class AtomicMySQLTests(TransactionTestCase):
available_apps = ["transactions"]
@skipIf(threading is None, "Test requires threading")
def test_implicit_savepoint_rollback(self):
"""MySQL implicitly rolls back savepoints when it deadlocks (#22291)."""
Reporter.objects.create(id=1)
Reporter.objects.create(id=2)
main_thread_ready = threading.Event()
def other_thread():
try:
with transaction.atomic():
Reporter.objects.select_for_update().get(id=1)
main_thread_ready.wait()
# 1) This line locks... (see below for 2)
Reporter.objects.exclude(id=1).update(id=2)
finally:
# This is the thread-local connection, not the main connection.
connection.close()
other_thread = threading.Thread(target=other_thread)
other_thread.start()
with self.assertRaisesMessage(OperationalError, "Deadlock found"):
# Double atomic to enter a transaction and create a savepoint.
with transaction.atomic():
with transaction.atomic():
Reporter.objects.select_for_update().get(id=2)
main_thread_ready.set()
# The two threads can't be synchronized with an event here
# because the other thread locks. Sleep for a little while.
time.sleep(1)
# 2) ... and this line deadlocks. (see above for 1)
Reporter.objects.exclude(id=2).update(id=1)
other_thread.join()
class AtomicMiscTests(TransactionTestCase):
available_apps = ["transactions"]
def test_wrap_callable_instance(self):
"""#20028 -- Atomic must support wrapping callable instances."""
class Callable:
def __call__(self):
pass
# Must not raise an exception
transaction.atomic(Callable())
@skipUnlessDBFeature("can_release_savepoints")
def test_atomic_does_not_leak_savepoints_on_failure(self):
"""#23074 -- Savepoints must be released after rollback."""
# Expect an error when rolling back a savepoint that doesn't exist.
# Done outside of the transaction block to ensure proper recovery.
with self.assertRaises(Error):
# Start a plain transaction.
with transaction.atomic():
# Swallow the intentional error raised in the sub-transaction.
with self.assertRaisesMessage(Exception, "Oops"):
# Start a sub-transaction with a savepoint.
with transaction.atomic():
sid = connection.savepoint_ids[-1]
raise Exception("Oops")
# This is expected to fail because the savepoint no longer exists.
connection.savepoint_rollback(sid)
def test_mark_for_rollback_on_error_in_transaction(self):
with transaction.atomic(savepoint=False):
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the
# transaction is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_rollback())
raise Exception("Oops")
# mark_for_rollback_on_error marked the transaction as broken …
self.assertTrue(transaction.get_rollback())
# … and further queries fail.
msg = "You can't execute queries until the end of the 'atomic' block."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
Reporter.objects.create()
# Transaction errors are reset at the end of a transaction, so this
# should just work.
Reporter.objects.create()
def test_mark_for_rollback_on_error_in_autocommit(self):
self.assertTrue(transaction.get_autocommit())
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the transaction
# is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_connection().needs_rollback)
raise Exception("Oops")
# Ensure that `mark_for_rollback_on_error` did not mark the transaction
# as broken, since we are in autocommit mode …
self.assertFalse(transaction.get_connection().needs_rollback)
# … and further queries work nicely.
Reporter.objects.create()
class NonAutocommitTests(TransactionTestCase):
available_apps = []
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
transaction.rollback()
transaction.set_autocommit(True)
def test_orm_query_after_error_and_rollback(self):
"""
ORM queries are allowed after an error and a rollback in non-autocommit
mode (#27504).
"""
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
transaction.rollback()
Reporter.objects.last()
def test_orm_query_without_autocommit(self):
"""#24921 -- ORM queries must be possible after set_autocommit(False)."""
Reporter.objects.create(first_name="Tintin")
class DurableTestsBase:
available_apps = ["transactions"]
def test_commit(self):
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name="Tintin")
self.assertEqual(Reporter.objects.get(), reporter)
def test_nested_outer_durable(self):
with transaction.atomic(durable=True):
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
reporter2 = Reporter.objects.create(
first_name="Archibald",
last_name="Haddock",
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_both_durable(self):
msg = "A durable atomic block cannot be nested within another atomic block."
with transaction.atomic(durable=True):
with self.assertRaisesMessage(RuntimeError, msg):
with transaction.atomic(durable=True):
pass
def test_nested_inner_durable(self):
msg = "A durable atomic block cannot be nested within another atomic block."
with transaction.atomic():
with self.assertRaisesMessage(RuntimeError, msg):
with transaction.atomic(durable=True):
pass
def test_sequence_of_durables(self):
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name="Tintin 1")
self.assertEqual(Reporter.objects.get(first_name="Tintin 1"), reporter)
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name="Tintin 2")
self.assertEqual(Reporter.objects.get(first_name="Tintin 2"), reporter)
class DurableTransactionTests(DurableTestsBase, TransactionTestCase):
pass
class DurableTests(DurableTestsBase, TestCase):
pass
|
264c825199f89a2ff2c213bcd3af4a2874560639fb8250efd3ee3b4bed946538 | # Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
import copy
import io
import os
import pickle
import re
import shutil
import sys
import tempfile
import threading
import time
import unittest
from pathlib import Path
from unittest import mock, skipIf
from django.conf import settings
from django.core import management, signals
from django.core.cache import (
DEFAULT_CACHE_ALIAS,
CacheHandler,
CacheKeyWarning,
InvalidCacheKey,
cache,
caches,
)
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.cache.backends.redis import RedisCacheClient
from django.core.cache.utils import make_template_fragment_key
from django.db import close_old_connections, connection, connections
from django.db.backends.utils import CursorWrapper
from django.http import (
HttpRequest,
HttpResponse,
HttpResponseNotModified,
StreamingHttpResponse,
)
from django.middleware.cache import (
CacheMiddleware,
FetchFromCacheMiddleware,
UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
RequestFactory,
SimpleTestCase,
TestCase,
TransactionTestCase,
override_settings,
)
from django.test.signals import setting_changed
from django.test.utils import CaptureQueriesContext
from django.utils import timezone, translation
from django.utils.cache import (
get_cache_key,
learn_cache_key,
patch_cache_control,
patch_vary_headers,
)
from django.views.decorators.cache import cache_control, cache_page
from .models import Poll, expensive_calculation
# functions/classes for complex data type tests
def f():
return 42
class C:
def m(n):
return 24
class Unpicklable:
def __getstate__(self):
raise pickle.PickleError()
def empty_response(request):
return HttpResponse()
KEY_ERRORS_WITH_MEMCACHED_MSG = (
"Cache key contains characters that will cause errors if used with memcached: %r"
)
@override_settings(
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache",
}
}
)
class DummyCacheTests(SimpleTestCase):
# The Dummy cache backend doesn't really behave like a test backend,
# so it has its own test case.
def test_simple(self):
"Dummy cache backend ignores cache set calls"
cache.set("key", "value")
self.assertIsNone(cache.get("key"))
def test_add(self):
"Add doesn't do anything in dummy cache backend"
self.assertIs(cache.add("addkey1", "value"), True)
self.assertIs(cache.add("addkey1", "newvalue"), True)
self.assertIsNone(cache.get("addkey1"))
def test_non_existent(self):
"Nonexistent keys aren't found in the dummy cache backend"
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
"get_many returns nothing for the dummy cache backend"
cache.set_many({"a": "a", "b": "b", "c": "c", "d": "d"})
self.assertEqual(cache.get_many(["a", "c", "d"]), {})
self.assertEqual(cache.get_many(["a", "b", "e"]), {})
def test_get_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ":1:key with spaces"
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.get_many(["key with spaces"])
def test_delete(self):
"Cache deletion is transparently ignored on the dummy cache backend"
cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertIsNone(cache.get("key1"))
self.assertIs(cache.delete("key1"), False)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_has_key(self):
"The has_key method doesn't ever return True for the dummy cache backend"
cache.set("hello1", "goodbye1")
self.assertIs(cache.has_key("hello1"), False)
self.assertIs(cache.has_key("goodbye1"), False)
def test_in(self):
"The in operator doesn't ever return True for the dummy cache backend"
cache.set("hello2", "goodbye2")
self.assertNotIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
def test_incr(self):
"Dummy cache values can't be incremented"
cache.set("answer", 42)
with self.assertRaises(ValueError):
cache.incr("answer")
with self.assertRaises(ValueError):
cache.incr("does_not_exist")
with self.assertRaises(ValueError):
cache.incr("does_not_exist", -1)
def test_decr(self):
"Dummy cache values can't be decremented"
cache.set("answer", 42)
with self.assertRaises(ValueError):
cache.decr("answer")
with self.assertRaises(ValueError):
cache.decr("does_not_exist")
with self.assertRaises(ValueError):
cache.decr("does_not_exist", -1)
def test_touch(self):
"""Dummy cache can't do touch()."""
self.assertIs(cache.touch("whatever"), False)
def test_data_types(self):
"All data types are ignored equally by the dummy cache"
tests = {
"string": "this is a string",
"int": 42,
"bool": True,
"list": [1, 2, 3, 4],
"tuple": (1, 2, 3, 4),
"dict": {"A": 1, "B": 2},
"function": f,
"class": C,
}
for key, value in tests.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertIsNone(cache.get(key))
def test_expiration(self):
"Expiration has no effect on the dummy cache"
cache.set("expire1", "very quickly", 1)
cache.set("expire2", "very quickly", 1)
cache.set("expire3", "very quickly", 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
self.assertIs(cache.add("expire2", "newvalue"), True)
self.assertIsNone(cache.get("expire2"))
self.assertIs(cache.has_key("expire3"), False)
def test_unicode(self):
"Unicode values are ignored by the dummy cache"
stuff = {
"ascii": "ascii_value",
"unicode_ascii": "Iñtërnâtiônàlizætiøn1",
"Iñtërnâtiônàlizætiøn": "Iñtërnâtiônàlizætiøn2",
"ascii2": {"x": 1},
}
for key, value in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertIsNone(cache.get(key))
def test_set_many(self):
"set_many does nothing for the dummy cache backend"
self.assertEqual(cache.set_many({"a": 1, "b": 2}), [])
self.assertEqual(cache.set_many({"a": 1, "b": 2}, timeout=2, version="1"), [])
def test_set_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ":1:key with spaces"
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.set_many({"key with spaces": "foo"})
def test_delete_many(self):
"delete_many does nothing for the dummy cache backend"
cache.delete_many(["a", "b"])
def test_delete_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ":1:key with spaces"
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.delete_many(["key with spaces"])
def test_clear(self):
"clear does nothing for the dummy cache backend"
cache.clear()
def test_incr_version(self):
"Dummy cache versions can't be incremented"
cache.set("answer", 42)
with self.assertRaises(ValueError):
cache.incr_version("answer")
with self.assertRaises(ValueError):
cache.incr_version("does_not_exist")
def test_decr_version(self):
"Dummy cache versions can't be decremented"
cache.set("answer", 42)
with self.assertRaises(ValueError):
cache.decr_version("answer")
with self.assertRaises(ValueError):
cache.decr_version("does_not_exist")
def test_get_or_set(self):
self.assertEqual(cache.get_or_set("mykey", "default"), "default")
self.assertIsNone(cache.get_or_set("mykey", None))
def test_get_or_set_callable(self):
def my_callable():
return "default"
self.assertEqual(cache.get_or_set("mykey", my_callable), "default")
self.assertEqual(cache.get_or_set("mykey", my_callable()), "default")
def custom_key_func(key, key_prefix, version):
"A customized cache key function"
return "CUSTOM-" + "-".join([key_prefix, str(version), key])
_caches_setting_base = {
"default": {},
"prefix": {"KEY_PREFIX": "cacheprefix{}".format(os.getpid())},
"v2": {"VERSION": 2},
"custom_key": {"KEY_FUNCTION": custom_key_func},
"custom_key2": {"KEY_FUNCTION": "cache.tests.custom_key_func"},
"cull": {"OPTIONS": {"MAX_ENTRIES": 30}},
"zero_cull": {"OPTIONS": {"CULL_FREQUENCY": 0, "MAX_ENTRIES": 30}},
}
def caches_setting_for_tests(base=None, exclude=None, **params):
# `base` is used to pull in the memcached config from the original settings,
# `exclude` is a set of cache names denoting which `_caches_setting_base` keys
# should be omitted.
# `params` are test-specific overrides and `_caches_setting_base` is the
# base config for the tests.
# This results in the following search order:
# params -> _caches_setting_base -> base
base = base or {}
exclude = exclude or set()
setting = {k: base.copy() for k in _caches_setting_base if k not in exclude}
for key, cache_params in setting.items():
cache_params.update(_caches_setting_base[key])
cache_params.update(params)
return setting
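# Illustrative call (the locmem backend here is a hypothetical example):
#
#     caches_setting_for_tests(
#         base={"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
#         exclude={"cull", "zero_cull"},
#         TIMEOUT=30,
#     )
#
# Each remaining alias starts from a copy of `base`, is updated with its
# `_caches_setting_base` entry, and finally receives the TIMEOUT override.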
class BaseCacheTests:
# A common set of tests to apply to all cache backends
factory = RequestFactory()
# Some clients raise custom exceptions when .incr() or .decr() are called
# with a non-integer value.
incr_decr_type_error = TypeError
def tearDown(self):
cache.clear()
def test_simple(self):
# Simple cache set/get works
cache.set("key", "value")
self.assertEqual(cache.get("key"), "value")
def test_default_used_when_none_is_set(self):
"""If None is cached, get() returns it instead of the default."""
cache.set("key_default_none", None)
self.assertIsNone(cache.get("key_default_none", default="default"))
def test_add(self):
# A key can be added to a cache
self.assertIs(cache.add("addkey1", "value"), True)
self.assertIs(cache.add("addkey1", "newvalue"), False)
self.assertEqual(cache.get("addkey1"), "value")
def test_prefix(self):
# Test for cache key conflicts between caches sharing the same backend.
cache.set("somekey", "value")
# The key set on the default cache shouldn't be visible in the prefixed cache.
self.assertIs(caches["prefix"].has_key("somekey"), False)
caches["prefix"].set("somekey", "value2")
self.assertEqual(cache.get("somekey"), "value")
self.assertEqual(caches["prefix"].get("somekey"), "value2")
def test_non_existent(self):
"""Nonexistent cache keys return as None/default."""
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
# Multiple cache keys can be returned using get_many
cache.set_many({"a": "a", "b": "b", "c": "c", "d": "d"})
self.assertEqual(
cache.get_many(["a", "c", "d"]), {"a": "a", "c": "c", "d": "d"}
)
self.assertEqual(cache.get_many(["a", "b", "e"]), {"a": "a", "b": "b"})
self.assertEqual(cache.get_many(iter(["a", "b", "e"])), {"a": "a", "b": "b"})
cache.set_many({"x": None, "y": 1})
self.assertEqual(cache.get_many(["x", "y"]), {"x": None, "y": 1})
def test_delete(self):
# Cache keys can be deleted
cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(cache.get("key1"), "spam")
self.assertIs(cache.delete("key1"), True)
self.assertIsNone(cache.get("key1"))
self.assertEqual(cache.get("key2"), "eggs")
def test_delete_nonexistent(self):
self.assertIs(cache.delete("nonexistent_key"), False)
def test_has_key(self):
# The cache can be inspected for cache keys
cache.set("hello1", "goodbye1")
self.assertIs(cache.has_key("hello1"), True)
self.assertIs(cache.has_key("goodbye1"), False)
cache.set("no_expiry", "here", None)
self.assertIs(cache.has_key("no_expiry"), True)
cache.set("null", None)
self.assertIs(cache.has_key("null"), True)
def test_in(self):
# The in operator can be used to inspect cache contents
cache.set("hello2", "goodbye2")
self.assertIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
cache.set("null", None)
self.assertIn("null", cache)
def test_incr(self):
# Cache values can be incremented
cache.set("answer", 41)
self.assertEqual(cache.incr("answer"), 42)
self.assertEqual(cache.get("answer"), 42)
self.assertEqual(cache.incr("answer", 10), 52)
self.assertEqual(cache.get("answer"), 52)
self.assertEqual(cache.incr("answer", -10), 42)
with self.assertRaises(ValueError):
cache.incr("does_not_exist")
with self.assertRaises(ValueError):
cache.incr("does_not_exist", -1)
cache.set("null", None)
with self.assertRaises(self.incr_decr_type_error):
cache.incr("null")
def test_decr(self):
# Cache values can be decremented
cache.set("answer", 43)
self.assertEqual(cache.decr("answer"), 42)
self.assertEqual(cache.get("answer"), 42)
self.assertEqual(cache.decr("answer", 10), 32)
self.assertEqual(cache.get("answer"), 32)
self.assertEqual(cache.decr("answer", -10), 42)
with self.assertRaises(ValueError):
cache.decr("does_not_exist")
with self.assertRaises(ValueError):
cache.incr("does_not_exist", -1)
cache.set("null", None)
with self.assertRaises(self.incr_decr_type_error):
cache.decr("null")
def test_close(self):
self.assertTrue(hasattr(cache, "close"))
cache.close()
def test_data_types(self):
# Many different data types can be cached
tests = {
"string": "this is a string",
"int": 42,
"bool": True,
"list": [1, 2, 3, 4],
"tuple": (1, 2, 3, 4),
"dict": {"A": 1, "B": 2},
"function": f,
"class": C,
}
for key, value in tests.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertEqual(cache.get(key), value)
def test_cache_read_for_model_instance(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="Well?")
self.assertEqual(Poll.objects.count(), 1)
pub_date = my_poll.pub_date
cache.set("question", my_poll)
cached_poll = cache.get("question")
self.assertEqual(cached_poll.pub_date, pub_date)
# We only want the default expensive calculation run once
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_write_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache write
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.defer("question")
self.assertEqual(defer_qs.count(), 1)
self.assertEqual(expensive_calculation.num_runs, 1)
cache.set("deferred_queryset", defer_qs)
# cache set should not re-evaluate default functions
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_read_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.defer("question")
self.assertEqual(defer_qs.count(), 1)
cache.set("deferred_queryset", defer_qs)
self.assertEqual(expensive_calculation.num_runs, 1)
runs_before_cache_read = expensive_calculation.num_runs
cache.get("deferred_queryset")
# We only want the default expensive calculation run on creation and set
self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
def test_expiration(self):
# Cache values can be set to expire
cache.set("expire1", "very quickly", 1)
cache.set("expire2", "very quickly", 1)
cache.set("expire3", "very quickly", 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
self.assertIs(cache.add("expire2", "newvalue"), True)
self.assertEqual(cache.get("expire2"), "newvalue")
self.assertIs(cache.has_key("expire3"), False)
def test_touch(self):
# cache.touch() updates the timeout.
cache.set("expire1", "very quickly", timeout=1)
self.assertIs(cache.touch("expire1", timeout=4), True)
time.sleep(2)
self.assertIs(cache.has_key("expire1"), True)
time.sleep(3)
self.assertIs(cache.has_key("expire1"), False)
# cache.touch() works without the timeout argument.
cache.set("expire1", "very quickly", timeout=1)
self.assertIs(cache.touch("expire1"), True)
time.sleep(2)
self.assertIs(cache.has_key("expire1"), True)
self.assertIs(cache.touch("nonexistent"), False)
def test_unicode(self):
# Unicode values can be cached
stuff = {
"ascii": "ascii_value",
"unicode_ascii": "Iñtërnâtiônàlizætiøn1",
"Iñtërnâtiônàlizætiøn": "Iñtërnâtiônàlizætiøn2",
"ascii2": {"x": 1},
}
# Test `set`
        for key, value in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertEqual(cache.get(key), value)
# Test `add`
        for key, value in stuff.items():
with self.subTest(key=key):
self.assertIs(cache.delete(key), True)
self.assertIs(cache.add(key, value), True)
self.assertEqual(cache.get(key), value)
# Test `set_many`
        for key, value in stuff.items():
self.assertIs(cache.delete(key), True)
cache.set_many(stuff)
        for key, value in stuff.items():
with self.subTest(key=key):
self.assertEqual(cache.get(key), value)
def test_binary_string(self):
# Binary strings should be cacheable
from zlib import compress, decompress
value = "value_to_be_compressed"
compressed_value = compress(value.encode())
# Test set
cache.set("binary1", compressed_value)
compressed_result = cache.get("binary1")
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test add
self.assertIs(cache.add("binary1-add", compressed_value), True)
compressed_result = cache.get("binary1-add")
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test set_many
cache.set_many({"binary1-set_many": compressed_value})
compressed_result = cache.get("binary1-set_many")
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
def test_set_many(self):
# Multiple keys can be set using set_many
cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(cache.get("key1"), "spam")
self.assertEqual(cache.get("key2"), "eggs")
def test_set_many_returns_empty_list_on_success(self):
"""set_many() returns an empty list when all keys are inserted."""
failing_keys = cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(failing_keys, [])
def test_set_many_expiration(self):
# set_many takes a second ``timeout`` parameter
cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
time.sleep(2)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_set_many_empty_data(self):
self.assertEqual(cache.set_many({}), [])
def test_delete_many(self):
# Multiple keys can be deleted using delete_many
cache.set_many({"key1": "spam", "key2": "eggs", "key3": "ham"})
cache.delete_many(["key1", "key2"])
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
self.assertEqual(cache.get("key3"), "ham")
def test_delete_many_no_keys(self):
self.assertIsNone(cache.delete_many([]))
def test_clear(self):
# The cache can be emptied using clear
cache.set_many({"key1": "spam", "key2": "eggs"})
cache.clear()
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_long_timeout(self):
"""
Follow memcached's convention where a timeout greater than 30 days is
treated as an absolute expiration timestamp instead of a relative
offset (#12399).
"""
cache.set("key1", "eggs", 60 * 60 * 24 * 30 + 1) # 30 days + 1 second
self.assertEqual(cache.get("key1"), "eggs")
self.assertIs(cache.add("key2", "ham", 60 * 60 * 24 * 30 + 1), True)
self.assertEqual(cache.get("key2"), "ham")
cache.set_many(
{"key3": "sausage", "key4": "lobster bisque"}, 60 * 60 * 24 * 30 + 1
)
self.assertEqual(cache.get("key3"), "sausage")
self.assertEqual(cache.get("key4"), "lobster bisque")
def test_forever_timeout(self):
"""
        Passing None as the timeout results in a value that is cached forever.
"""
cache.set("key1", "eggs", None)
self.assertEqual(cache.get("key1"), "eggs")
self.assertIs(cache.add("key2", "ham", None), True)
self.assertEqual(cache.get("key2"), "ham")
self.assertIs(cache.add("key1", "new eggs", None), False)
self.assertEqual(cache.get("key1"), "eggs")
cache.set_many({"key3": "sausage", "key4": "lobster bisque"}, None)
self.assertEqual(cache.get("key3"), "sausage")
self.assertEqual(cache.get("key4"), "lobster bisque")
cache.set("key5", "belgian fries", timeout=1)
self.assertIs(cache.touch("key5", timeout=None), True)
time.sleep(2)
self.assertEqual(cache.get("key5"), "belgian fries")
def test_zero_timeout(self):
"""
        Passing zero as the timeout results in a value that is not cached.
"""
cache.set("key1", "eggs", 0)
self.assertIsNone(cache.get("key1"))
self.assertIs(cache.add("key2", "ham", 0), True)
self.assertIsNone(cache.get("key2"))
cache.set_many({"key3": "sausage", "key4": "lobster bisque"}, 0)
self.assertIsNone(cache.get("key3"))
self.assertIsNone(cache.get("key4"))
cache.set("key5", "belgian fries", timeout=5)
self.assertIs(cache.touch("key5", timeout=0), True)
self.assertIsNone(cache.get("key5"))
def test_float_timeout(self):
# Make sure a timeout given as a float doesn't crash anything.
cache.set("key1", "spam", 100.2)
self.assertEqual(cache.get("key1"), "spam")
def _perform_cull_test(self, cull_cache_name, initial_count, final_count):
try:
cull_cache = caches[cull_cache_name]
except InvalidCacheBackendError:
self.skipTest("Culling isn't implemented.")
# Create initial cache key entries. This will overflow the cache,
# causing a cull.
for i in range(1, initial_count):
cull_cache.set("cull%d" % i, "value", 1000)
count = 0
# Count how many keys are left in the cache.
for i in range(1, initial_count):
if cull_cache.has_key("cull%d" % i):
count += 1
self.assertEqual(count, final_count)
def test_cull(self):
self._perform_cull_test("cull", 50, 29)
def test_zero_cull(self):
self._perform_cull_test("zero_cull", 50, 19)
def test_cull_delete_when_store_empty(self):
try:
cull_cache = caches["cull"]
except InvalidCacheBackendError:
self.skipTest("Culling isn't implemented.")
old_max_entries = cull_cache._max_entries
# Force _cull to delete on first cached record.
cull_cache._max_entries = -1
try:
cull_cache.set("force_cull_delete", "value", 1000)
self.assertIs(cull_cache.has_key("force_cull_delete"), True)
finally:
cull_cache._max_entries = old_max_entries
def _perform_invalid_key_test(self, key, expected_warning, key_func=None):
"""
        All the built-in backends should warn (except memcached, which should
        error) on keys that would be refused by memcached. This encourages
portable caching code without making it too difficult to use production
backends with more liberal key rules. Refs #6447.
"""
        # Mimic a custom ``make_key`` method being defined, since the default
        # one never triggers the warnings below.
def func(key, *args):
return key
old_func = cache.key_func
cache.key_func = key_func or func
tests = [
("add", [key, 1]),
("get", [key]),
("set", [key, 1]),
("incr", [key]),
("decr", [key]),
("touch", [key]),
("delete", [key]),
("get_many", [[key, "b"]]),
("set_many", [{key: 1, "b": 2}]),
("delete_many", [[key, "b"]]),
]
try:
for operation, args in tests:
with self.subTest(operation=operation):
with self.assertWarns(CacheKeyWarning) as cm:
getattr(cache, operation)(*args)
self.assertEqual(str(cm.warning), expected_warning)
finally:
cache.key_func = old_func
def test_invalid_key_characters(self):
# memcached doesn't allow whitespace or control characters in keys.
key = "key with spaces and 清"
self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key)
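    # For reference, KEY_ERRORS_WITH_MEMCACHED_MSG (from
    # django.core.cache.backends.base) is the template "Cache key contains
    # characters that will cause errors if used with memcached: %r".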
def test_invalid_key_length(self):
# memcached limits key length to 250.
key = ("a" * 250) + "清"
expected_warning = (
"Cache key will cause errors if used with memcached: "
"%r (longer than %s)" % (key, 250)
)
self._perform_invalid_key_test(key, expected_warning)
def test_invalid_with_version_key_length(self):
# Custom make_key() that adds a version to the key and exceeds the
# limit.
def key_func(key, *args):
return key + ":1"
key = "a" * 249
expected_warning = (
"Cache key will cause errors if used with memcached: "
"%r (longer than %s)" % (key_func(key), 250)
)
self._perform_invalid_key_test(key, expected_warning, key_func=key_func)
def test_cache_versioning_get_set(self):
# set, using default version = 1
cache.set("answer1", 42)
self.assertEqual(cache.get("answer1"), 42)
self.assertEqual(cache.get("answer1", version=1), 42)
self.assertIsNone(cache.get("answer1", version=2))
self.assertIsNone(caches["v2"].get("answer1"))
self.assertEqual(caches["v2"].get("answer1", version=1), 42)
self.assertIsNone(caches["v2"].get("answer1", version=2))
# set, default version = 1, but manually override version = 2
cache.set("answer2", 42, version=2)
self.assertIsNone(cache.get("answer2"))
self.assertIsNone(cache.get("answer2", version=1))
self.assertEqual(cache.get("answer2", version=2), 42)
self.assertEqual(caches["v2"].get("answer2"), 42)
self.assertIsNone(caches["v2"].get("answer2", version=1))
self.assertEqual(caches["v2"].get("answer2", version=2), 42)
# v2 set, using default version = 2
caches["v2"].set("answer3", 42)
self.assertIsNone(cache.get("answer3"))
self.assertIsNone(cache.get("answer3", version=1))
self.assertEqual(cache.get("answer3", version=2), 42)
self.assertEqual(caches["v2"].get("answer3"), 42)
self.assertIsNone(caches["v2"].get("answer3", version=1))
self.assertEqual(caches["v2"].get("answer3", version=2), 42)
# v2 set, default version = 2, but manually override version = 1
caches["v2"].set("answer4", 42, version=1)
self.assertEqual(cache.get("answer4"), 42)
self.assertEqual(cache.get("answer4", version=1), 42)
self.assertIsNone(cache.get("answer4", version=2))
self.assertIsNone(caches["v2"].get("answer4"))
self.assertEqual(caches["v2"].get("answer4", version=1), 42)
self.assertIsNone(caches["v2"].get("answer4", version=2))
def test_cache_versioning_add(self):
# add, default version = 1, but manually override version = 2
self.assertIs(cache.add("answer1", 42, version=2), True)
self.assertIsNone(cache.get("answer1", version=1))
self.assertEqual(cache.get("answer1", version=2), 42)
self.assertIs(cache.add("answer1", 37, version=2), False)
self.assertIsNone(cache.get("answer1", version=1))
self.assertEqual(cache.get("answer1", version=2), 42)
self.assertIs(cache.add("answer1", 37, version=1), True)
self.assertEqual(cache.get("answer1", version=1), 37)
self.assertEqual(cache.get("answer1", version=2), 42)
# v2 add, using default version = 2
self.assertIs(caches["v2"].add("answer2", 42), True)
self.assertIsNone(cache.get("answer2", version=1))
self.assertEqual(cache.get("answer2", version=2), 42)
self.assertIs(caches["v2"].add("answer2", 37), False)
self.assertIsNone(cache.get("answer2", version=1))
self.assertEqual(cache.get("answer2", version=2), 42)
self.assertIs(caches["v2"].add("answer2", 37, version=1), True)
self.assertEqual(cache.get("answer2", version=1), 37)
self.assertEqual(cache.get("answer2", version=2), 42)
# v2 add, default version = 2, but manually override version = 1
self.assertIs(caches["v2"].add("answer3", 42, version=1), True)
self.assertEqual(cache.get("answer3", version=1), 42)
self.assertIsNone(cache.get("answer3", version=2))
self.assertIs(caches["v2"].add("answer3", 37, version=1), False)
self.assertEqual(cache.get("answer3", version=1), 42)
self.assertIsNone(cache.get("answer3", version=2))
self.assertIs(caches["v2"].add("answer3", 37), True)
self.assertEqual(cache.get("answer3", version=1), 42)
self.assertEqual(cache.get("answer3", version=2), 37)
def test_cache_versioning_has_key(self):
cache.set("answer1", 42)
# has_key
self.assertIs(cache.has_key("answer1"), True)
self.assertIs(cache.has_key("answer1", version=1), True)
self.assertIs(cache.has_key("answer1", version=2), False)
self.assertIs(caches["v2"].has_key("answer1"), False)
self.assertIs(caches["v2"].has_key("answer1", version=1), True)
self.assertIs(caches["v2"].has_key("answer1", version=2), False)
def test_cache_versioning_delete(self):
cache.set("answer1", 37, version=1)
cache.set("answer1", 42, version=2)
self.assertIs(cache.delete("answer1"), True)
self.assertIsNone(cache.get("answer1", version=1))
self.assertEqual(cache.get("answer1", version=2), 42)
cache.set("answer2", 37, version=1)
cache.set("answer2", 42, version=2)
self.assertIs(cache.delete("answer2", version=2), True)
self.assertEqual(cache.get("answer2", version=1), 37)
self.assertIsNone(cache.get("answer2", version=2))
cache.set("answer3", 37, version=1)
cache.set("answer3", 42, version=2)
self.assertIs(caches["v2"].delete("answer3"), True)
self.assertEqual(cache.get("answer3", version=1), 37)
self.assertIsNone(cache.get("answer3", version=2))
cache.set("answer4", 37, version=1)
cache.set("answer4", 42, version=2)
self.assertIs(caches["v2"].delete("answer4", version=1), True)
self.assertIsNone(cache.get("answer4", version=1))
self.assertEqual(cache.get("answer4", version=2), 42)
def test_cache_versioning_incr_decr(self):
cache.set("answer1", 37, version=1)
cache.set("answer1", 42, version=2)
self.assertEqual(cache.incr("answer1"), 38)
self.assertEqual(cache.get("answer1", version=1), 38)
self.assertEqual(cache.get("answer1", version=2), 42)
self.assertEqual(cache.decr("answer1"), 37)
self.assertEqual(cache.get("answer1", version=1), 37)
self.assertEqual(cache.get("answer1", version=2), 42)
cache.set("answer2", 37, version=1)
cache.set("answer2", 42, version=2)
self.assertEqual(cache.incr("answer2", version=2), 43)
self.assertEqual(cache.get("answer2", version=1), 37)
self.assertEqual(cache.get("answer2", version=2), 43)
self.assertEqual(cache.decr("answer2", version=2), 42)
self.assertEqual(cache.get("answer2", version=1), 37)
self.assertEqual(cache.get("answer2", version=2), 42)
cache.set("answer3", 37, version=1)
cache.set("answer3", 42, version=2)
self.assertEqual(caches["v2"].incr("answer3"), 43)
self.assertEqual(cache.get("answer3", version=1), 37)
self.assertEqual(cache.get("answer3", version=2), 43)
self.assertEqual(caches["v2"].decr("answer3"), 42)
self.assertEqual(cache.get("answer3", version=1), 37)
self.assertEqual(cache.get("answer3", version=2), 42)
cache.set("answer4", 37, version=1)
cache.set("answer4", 42, version=2)
self.assertEqual(caches["v2"].incr("answer4", version=1), 38)
self.assertEqual(cache.get("answer4", version=1), 38)
self.assertEqual(cache.get("answer4", version=2), 42)
self.assertEqual(caches["v2"].decr("answer4", version=1), 37)
self.assertEqual(cache.get("answer4", version=1), 37)
self.assertEqual(cache.get("answer4", version=2), 42)
def test_cache_versioning_get_set_many(self):
# set, using default version = 1
cache.set_many({"ford1": 37, "arthur1": 42})
self.assertEqual(
cache.get_many(["ford1", "arthur1"]), {"ford1": 37, "arthur1": 42}
)
self.assertEqual(
cache.get_many(["ford1", "arthur1"], version=1),
{"ford1": 37, "arthur1": 42},
)
self.assertEqual(cache.get_many(["ford1", "arthur1"], version=2), {})
self.assertEqual(caches["v2"].get_many(["ford1", "arthur1"]), {})
self.assertEqual(
caches["v2"].get_many(["ford1", "arthur1"], version=1),
{"ford1": 37, "arthur1": 42},
)
self.assertEqual(caches["v2"].get_many(["ford1", "arthur1"], version=2), {})
# set, default version = 1, but manually override version = 2
cache.set_many({"ford2": 37, "arthur2": 42}, version=2)
self.assertEqual(cache.get_many(["ford2", "arthur2"]), {})
self.assertEqual(cache.get_many(["ford2", "arthur2"], version=1), {})
self.assertEqual(
cache.get_many(["ford2", "arthur2"], version=2),
{"ford2": 37, "arthur2": 42},
)
self.assertEqual(
caches["v2"].get_many(["ford2", "arthur2"]), {"ford2": 37, "arthur2": 42}
)
self.assertEqual(caches["v2"].get_many(["ford2", "arthur2"], version=1), {})
self.assertEqual(
caches["v2"].get_many(["ford2", "arthur2"], version=2),
{"ford2": 37, "arthur2": 42},
)
# v2 set, using default version = 2
caches["v2"].set_many({"ford3": 37, "arthur3": 42})
self.assertEqual(cache.get_many(["ford3", "arthur3"]), {})
self.assertEqual(cache.get_many(["ford3", "arthur3"], version=1), {})
self.assertEqual(
cache.get_many(["ford3", "arthur3"], version=2),
{"ford3": 37, "arthur3": 42},
)
self.assertEqual(
caches["v2"].get_many(["ford3", "arthur3"]), {"ford3": 37, "arthur3": 42}
)
self.assertEqual(caches["v2"].get_many(["ford3", "arthur3"], version=1), {})
self.assertEqual(
caches["v2"].get_many(["ford3", "arthur3"], version=2),
{"ford3": 37, "arthur3": 42},
)
# v2 set, default version = 2, but manually override version = 1
caches["v2"].set_many({"ford4": 37, "arthur4": 42}, version=1)
self.assertEqual(
cache.get_many(["ford4", "arthur4"]), {"ford4": 37, "arthur4": 42}
)
self.assertEqual(
cache.get_many(["ford4", "arthur4"], version=1),
{"ford4": 37, "arthur4": 42},
)
self.assertEqual(cache.get_many(["ford4", "arthur4"], version=2), {})
self.assertEqual(caches["v2"].get_many(["ford4", "arthur4"]), {})
self.assertEqual(
caches["v2"].get_many(["ford4", "arthur4"], version=1),
{"ford4": 37, "arthur4": 42},
)
self.assertEqual(caches["v2"].get_many(["ford4", "arthur4"], version=2), {})
def test_incr_version(self):
cache.set("answer", 42, version=2)
self.assertIsNone(cache.get("answer"))
self.assertIsNone(cache.get("answer", version=1))
self.assertEqual(cache.get("answer", version=2), 42)
self.assertIsNone(cache.get("answer", version=3))
self.assertEqual(cache.incr_version("answer", version=2), 3)
self.assertIsNone(cache.get("answer"))
self.assertIsNone(cache.get("answer", version=1))
self.assertIsNone(cache.get("answer", version=2))
self.assertEqual(cache.get("answer", version=3), 42)
caches["v2"].set("answer2", 42)
self.assertEqual(caches["v2"].get("answer2"), 42)
self.assertIsNone(caches["v2"].get("answer2", version=1))
self.assertEqual(caches["v2"].get("answer2", version=2), 42)
self.assertIsNone(caches["v2"].get("answer2", version=3))
self.assertEqual(caches["v2"].incr_version("answer2"), 3)
self.assertIsNone(caches["v2"].get("answer2"))
self.assertIsNone(caches["v2"].get("answer2", version=1))
self.assertIsNone(caches["v2"].get("answer2", version=2))
self.assertEqual(caches["v2"].get("answer2", version=3), 42)
with self.assertRaises(ValueError):
cache.incr_version("does_not_exist")
cache.set("null", None)
self.assertEqual(cache.incr_version("null"), 2)
def test_decr_version(self):
cache.set("answer", 42, version=2)
self.assertIsNone(cache.get("answer"))
self.assertIsNone(cache.get("answer", version=1))
self.assertEqual(cache.get("answer", version=2), 42)
self.assertEqual(cache.decr_version("answer", version=2), 1)
self.assertEqual(cache.get("answer"), 42)
self.assertEqual(cache.get("answer", version=1), 42)
self.assertIsNone(cache.get("answer", version=2))
caches["v2"].set("answer2", 42)
self.assertEqual(caches["v2"].get("answer2"), 42)
self.assertIsNone(caches["v2"].get("answer2", version=1))
self.assertEqual(caches["v2"].get("answer2", version=2), 42)
self.assertEqual(caches["v2"].decr_version("answer2"), 1)
self.assertIsNone(caches["v2"].get("answer2"))
self.assertEqual(caches["v2"].get("answer2", version=1), 42)
self.assertIsNone(caches["v2"].get("answer2", version=2))
with self.assertRaises(ValueError):
cache.decr_version("does_not_exist", version=2)
cache.set("null", None, version=2)
self.assertEqual(cache.decr_version("null", version=2), 1)
def test_custom_key_func(self):
# Two caches with different key functions aren't visible to each other
cache.set("answer1", 42)
self.assertEqual(cache.get("answer1"), 42)
self.assertIsNone(caches["custom_key"].get("answer1"))
self.assertIsNone(caches["custom_key2"].get("answer1"))
caches["custom_key"].set("answer2", 42)
self.assertIsNone(cache.get("answer2"))
self.assertEqual(caches["custom_key"].get("answer2"), 42)
self.assertEqual(caches["custom_key2"].get("answer2"), 42)
@override_settings(CACHE_MIDDLEWARE_ALIAS=DEFAULT_CACHE_ALIAS)
def test_cache_write_unpicklable_object(self):
fetch_middleware = FetchFromCacheMiddleware(empty_response)
request = self.factory.get("/cache/test")
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertIsNone(get_cache_data)
content = "Testing cookie serialization."
def get_response(req):
response = HttpResponse(content)
response.set_cookie("foo", "bar")
return response
update_middleware = UpdateCacheMiddleware(get_response)
response = update_middleware(request)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
UpdateCacheMiddleware(lambda req: get_cache_data)(request)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
def test_add_fail_on_pickleerror(self):
# Shouldn't fail silently if trying to cache an unpicklable type.
with self.assertRaises(pickle.PickleError):
cache.add("unpicklable", Unpicklable())
def test_set_fail_on_pickleerror(self):
with self.assertRaises(pickle.PickleError):
cache.set("unpicklable", Unpicklable())
def test_get_or_set(self):
self.assertIsNone(cache.get("projector"))
self.assertEqual(cache.get_or_set("projector", 42), 42)
self.assertEqual(cache.get("projector"), 42)
self.assertIsNone(cache.get_or_set("null", None))
# Previous get_or_set() stores None in the cache.
self.assertIsNone(cache.get("null", "default"))
def test_get_or_set_callable(self):
def my_callable():
return "value"
self.assertEqual(cache.get_or_set("mykey", my_callable), "value")
self.assertEqual(cache.get_or_set("mykey", my_callable()), "value")
self.assertIsNone(cache.get_or_set("null", lambda: None))
# Previous get_or_set() stores None in the cache.
self.assertIsNone(cache.get("null", "default"))
def test_get_or_set_version(self):
msg = "get_or_set() missing 1 required positional argument: 'default'"
self.assertEqual(cache.get_or_set("brian", 1979, version=2), 1979)
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set("brian")
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set("brian", version=1)
self.assertIsNone(cache.get("brian", version=1))
self.assertEqual(cache.get_or_set("brian", 42, version=1), 42)
self.assertEqual(cache.get_or_set("brian", 1979, version=2), 1979)
self.assertIsNone(cache.get("brian", version=3))
def test_get_or_set_racing(self):
with mock.patch(
"%s.%s" % (settings.CACHES["default"]["BACKEND"], "add")
) as cache_add:
# Simulate cache.add() failing to add a value. In that case, the
# default value should be returned.
cache_add.return_value = False
self.assertEqual(cache.get_or_set("key", "default"), "default")
@override_settings(
CACHES=caches_setting_for_tests(
BACKEND="django.core.cache.backends.db.DatabaseCache",
# Spaces are used in the table name to ensure quoting/escaping is working
LOCATION="test cache table",
)
)
class DBCacheTests(BaseCacheTests, TransactionTestCase):
available_apps = ["cache"]
def setUp(self):
        # The super call needs to happen first for the settings override.
super().setUp()
self.create_table()
def tearDown(self):
# The super call needs to happen first because it uses the database.
super().tearDown()
self.drop_table()
def create_table(self):
management.call_command("createcachetable", verbosity=0)
def drop_table(self):
with connection.cursor() as cursor:
table_name = connection.ops.quote_name("test cache table")
cursor.execute("DROP TABLE %s" % table_name)
def test_get_many_num_queries(self):
cache.set_many({"a": 1, "b": 2})
cache.set("expired", "expired", 0.01)
with self.assertNumQueries(1):
self.assertEqual(cache.get_many(["a", "b"]), {"a": 1, "b": 2})
time.sleep(0.02)
with self.assertNumQueries(2):
self.assertEqual(cache.get_many(["a", "b", "expired"]), {"a": 1, "b": 2})
def test_delete_many_num_queries(self):
cache.set_many({"a": 1, "b": 2, "c": 3})
with self.assertNumQueries(1):
cache.delete_many(["a", "b", "c"])
def test_cull_queries(self):
old_max_entries = cache._max_entries
# Force _cull to delete on first cached record.
cache._max_entries = -1
with CaptureQueriesContext(connection) as captured_queries:
try:
cache.set("force_cull", "value", 1000)
finally:
cache._max_entries = old_max_entries
num_count_queries = sum("COUNT" in query["sql"] for query in captured_queries)
self.assertEqual(num_count_queries, 1)
# Column names are quoted.
for query in captured_queries:
sql = query["sql"]
if "expires" in sql:
self.assertIn(connection.ops.quote_name("expires"), sql)
if "cache_key" in sql:
self.assertIn(connection.ops.quote_name("cache_key"), sql)
def test_delete_cursor_rowcount(self):
"""
The rowcount attribute should not be checked on a closed cursor.
"""
class MockedCursorWrapper(CursorWrapper):
is_closed = False
def close(self):
self.cursor.close()
self.is_closed = True
@property
def rowcount(self):
if self.is_closed:
raise Exception("Cursor is closed.")
return self.cursor.rowcount
cache.set_many({"a": 1, "b": 2})
with mock.patch("django.db.backends.utils.CursorWrapper", MockedCursorWrapper):
self.assertIs(cache.delete("a"), True)
def test_zero_cull(self):
self._perform_cull_test("zero_cull", 50, 18)
def test_second_call_doesnt_crash(self):
out = io.StringIO()
management.call_command("createcachetable", stdout=out)
self.assertEqual(
out.getvalue(),
"Cache table 'test cache table' already exists.\n" * len(settings.CACHES),
)
@override_settings(
CACHES=caches_setting_for_tests(
BACKEND="django.core.cache.backends.db.DatabaseCache",
# Use another table name to avoid the 'table already exists' message.
LOCATION="createcachetable_dry_run_mode",
)
)
def test_createcachetable_dry_run_mode(self):
out = io.StringIO()
management.call_command("createcachetable", dry_run=True, stdout=out)
output = out.getvalue()
self.assertTrue(output.startswith("CREATE TABLE"))
def test_createcachetable_with_table_argument(self):
"""
Delete and recreate cache table with legacy behavior (explicitly
specifying the table name).
"""
self.drop_table()
out = io.StringIO()
management.call_command(
"createcachetable",
"test cache table",
verbosity=2,
stdout=out,
)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n")
def test_has_key_query_columns_quoted(self):
with CaptureQueriesContext(connection) as captured_queries:
cache.has_key("key")
self.assertEqual(len(captured_queries), 1)
sql = captured_queries[0]["sql"]
# Column names are quoted.
self.assertIn(connection.ops.quote_name("expires"), sql)
self.assertIn(connection.ops.quote_name("cache_key"), sql)
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
pass
class DBCacheRouter:
"""A router that puts the cache table on the 'other' database."""
def db_for_read(self, model, **hints):
if model._meta.app_label == "django_cache":
return "other"
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == "django_cache":
return "other"
return None
def allow_migrate(self, db, app_label, **hints):
if app_label == "django_cache":
return db == "other"
return None
@override_settings(
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.db.DatabaseCache",
"LOCATION": "my_cache_table",
},
},
)
class CreateCacheTableForDBCacheTests(TestCase):
databases = {"default", "other"}
@override_settings(DATABASE_ROUTERS=[DBCacheRouter()])
def test_createcachetable_observes_database_router(self):
# cache table should not be created on 'default'
with self.assertNumQueries(0, using="default"):
management.call_command("createcachetable", database="default", verbosity=0)
# cache table should be created on 'other'
# Queries:
# 1: check table doesn't already exist
# 2: create savepoint (if transactional DDL is supported)
# 3: create the table
# 4: create the index
# 5: release savepoint (if transactional DDL is supported)
num = 5 if connections["other"].features.can_rollback_ddl else 3
with self.assertNumQueries(num, using="other"):
management.call_command("createcachetable", database="other", verbosity=0)
class PicklingSideEffect:
def __init__(self, cache):
self.cache = cache
self.locked = False
def __getstate__(self):
self.locked = self.cache._lock.locked()
return {}
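# Pickling a PicklingSideEffect instance records whether the cache's internal
# lock was held at pickling time; test_locking_on_pickle() below uses this to
# verify that LocMemCache serializes values outside of its lock.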
limit_locmem_entries = override_settings(
CACHES=caches_setting_for_tests(
BACKEND="django.core.cache.backends.locmem.LocMemCache",
OPTIONS={"MAX_ENTRIES": 9},
)
)
@override_settings(
CACHES=caches_setting_for_tests(
BACKEND="django.core.cache.backends.locmem.LocMemCache",
)
)
class LocMemCacheTests(BaseCacheTests, TestCase):
def setUp(self):
super().setUp()
# LocMem requires a hack to make the other caches
# share a data store with the 'normal' cache.
caches["prefix"]._cache = cache._cache
caches["prefix"]._expire_info = cache._expire_info
caches["v2"]._cache = cache._cache
caches["v2"]._expire_info = cache._expire_info
caches["custom_key"]._cache = cache._cache
caches["custom_key"]._expire_info = cache._expire_info
caches["custom_key2"]._cache = cache._cache
caches["custom_key2"]._expire_info = cache._expire_info
@override_settings(
CACHES={
"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
"other": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "other",
},
}
)
def test_multiple_caches(self):
"Multiple locmem caches are isolated"
cache.set("value", 42)
self.assertEqual(caches["default"].get("value"), 42)
self.assertIsNone(caches["other"].get("value"))
def test_locking_on_pickle(self):
"""#20613/#18541 -- Ensures pickling is done outside of the lock."""
bad_obj = PicklingSideEffect(cache)
cache.set("set", bad_obj)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
self.assertIs(cache.add("add", bad_obj), True)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
def test_incr_decr_timeout(self):
"""incr/decr does not modify expiry time (matches memcached behavior)"""
key = "value"
_key = cache.make_key(key)
cache.set(key, 1, timeout=cache.default_timeout * 10)
expire = cache._expire_info[_key]
self.assertEqual(cache.incr(key), 2)
self.assertEqual(expire, cache._expire_info[_key])
self.assertEqual(cache.decr(key), 1)
self.assertEqual(expire, cache._expire_info[_key])
@limit_locmem_entries
def test_lru_get(self):
"""get() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
@limit_locmem_entries
def test_lru_set(self):
"""set() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(3, 9):
cache.set(key, key, timeout=None)
cache.set(9, 9, timeout=None)
for key in range(3, 10):
self.assertEqual(cache.get(key), key)
for key in range(3):
self.assertIsNone(cache.get(key))
@limit_locmem_entries
def test_lru_incr(self):
"""incr() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.incr(key), key + 1)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key + 1)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
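    # The three LRU tests above rely on LocMemCache's eviction order: the
    # backend stores entries in an OrderedDict and calls move_to_end() on
    # access, so culling evicts the least recently used keys first.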
# memcached and redis backends aren't guaranteed to be available.
# To check the backends, the test settings file will need to contain at least
# one cache backend setting that points at your cache server.
configured_caches = {}
for _cache_params in settings.CACHES.values():
configured_caches[_cache_params["BACKEND"]] = _cache_params
PyLibMCCache_params = configured_caches.get(
"django.core.cache.backends.memcached.PyLibMCCache"
)
PyMemcacheCache_params = configured_caches.get(
"django.core.cache.backends.memcached.PyMemcacheCache"
)
# The memcached backends don't support cull-related options like `MAX_ENTRIES`.
memcached_excluded_caches = {"cull", "zero_cull"}
RedisCache_params = configured_caches.get("django.core.cache.backends.redis.RedisCache")
# The redis backend does not support cull-related options like `MAX_ENTRIES`.
redis_excluded_caches = {"cull", "zero_cull"}
class BaseMemcachedTests(BaseCacheTests):
# By default it's assumed that the client doesn't clean up connections
# properly, in which case the backend must do so after each request.
should_disconnect_on_close = True
def test_location_multiple_servers(self):
locations = [
["server1.tld", "server2:11211"],
"server1.tld;server2:11211",
"server1.tld,server2:11211",
]
for location in locations:
with self.subTest(location=location):
params = {"BACKEND": self.base_params["BACKEND"], "LOCATION": location}
with self.settings(CACHES={"default": params}):
self.assertEqual(cache._servers, ["server1.tld", "server2:11211"])
def _perform_invalid_key_test(self, key, expected_warning):
"""
While other backends merely warn, memcached should raise for an invalid
key.
"""
msg = expected_warning.replace(key, cache.make_key(key))
tests = [
("add", [key, 1]),
("get", [key]),
("set", [key, 1]),
("incr", [key]),
("decr", [key]),
("touch", [key]),
("delete", [key]),
("get_many", [[key, "b"]]),
("set_many", [{key: 1, "b": 2}]),
("delete_many", [[key, "b"]]),
]
for operation, args in tests:
with self.subTest(operation=operation):
with self.assertRaises(InvalidCacheKey) as cm:
getattr(cache, operation)(*args)
self.assertEqual(str(cm.exception), msg)
def test_invalid_with_version_key_length(self):
# make_key() adds a version to the key and exceeds the limit.
key = "a" * 248
expected_warning = (
"Cache key will cause errors if used with memcached: "
"%r (longer than %s)" % (key, 250)
)
self._perform_invalid_key_test(key, expected_warning)
def test_default_never_expiring_timeout(self):
# Regression test for #22845
with self.settings(
CACHES=caches_setting_for_tests(
base=self.base_params, exclude=memcached_excluded_caches, TIMEOUT=None
)
):
cache.set("infinite_foo", "bar")
self.assertEqual(cache.get("infinite_foo"), "bar")
def test_default_far_future_timeout(self):
# Regression test for #22845
with self.settings(
CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
# 60*60*24*365, 1 year
TIMEOUT=31536000,
)
):
cache.set("future_foo", "bar")
self.assertEqual(cache.get("future_foo"), "bar")
def test_memcached_deletes_key_on_failed_set(self):
        # By default memcached allows objects up to 1MB. For the cache_db
        # session backend to always use the current session, memcached needs
        # to delete the old key when a set() fails.
max_value_length = 2**20
cache.set("small_value", "a")
self.assertEqual(cache.get("small_value"), "a")
large_value = "a" * (max_value_length + 1)
try:
cache.set("small_value", large_value)
except Exception:
# Most clients (e.g. pymemcache or pylibmc) raise when the value is
# too large. This test is primarily checking that the key was
# deleted, so the return/exception behavior for the set() itself is
# not important.
pass
# small_value should be deleted, or set if configured to accept larger values
value = cache.get("small_value")
self.assertTrue(value is None or value == large_value)
def test_close(self):
# For clients that don't manage their connections properly, the
# connection is closed when the request is complete.
signals.request_finished.disconnect(close_old_connections)
try:
with mock.patch.object(
cache._class, "disconnect_all", autospec=True
) as mock_disconnect:
signals.request_finished.send(self.__class__)
self.assertIs(mock_disconnect.called, self.should_disconnect_on_close)
finally:
signals.request_finished.connect(close_old_connections)
def test_set_many_returns_failing_keys(self):
def fail_set_multi(mapping, *args, **kwargs):
return mapping.keys()
with mock.patch.object(cache._class, "set_multi", side_effect=fail_set_multi):
failing_keys = cache.set_many({"key": "value"})
self.assertEqual(failing_keys, ["key"])
@unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured")
@override_settings(
CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
)
)
class PyLibMCCacheTests(BaseMemcachedTests, TestCase):
base_params = PyLibMCCache_params
# libmemcached manages its own connections.
should_disconnect_on_close = False
@property
def incr_decr_type_error(self):
return cache._lib.ClientError
@override_settings(
CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
OPTIONS={
"binary": True,
"behaviors": {"tcp_nodelay": True},
},
)
)
def test_pylibmc_options(self):
self.assertTrue(cache._cache.binary)
self.assertEqual(cache._cache.behaviors["tcp_nodelay"], int(True))
def test_pylibmc_client_servers(self):
backend = self.base_params["BACKEND"]
tests = [
("unix:/run/memcached/socket", "/run/memcached/socket"),
("/run/memcached/socket", "/run/memcached/socket"),
("localhost", "localhost"),
("localhost:11211", "localhost:11211"),
("[::1]", "[::1]"),
("[::1]:11211", "[::1]:11211"),
("127.0.0.1", "127.0.0.1"),
("127.0.0.1:11211", "127.0.0.1:11211"),
]
for location, expected in tests:
settings = {"default": {"BACKEND": backend, "LOCATION": location}}
with self.subTest(location), self.settings(CACHES=settings):
self.assertEqual(cache.client_servers, [expected])
@unittest.skipUnless(PyMemcacheCache_params, "PyMemcacheCache backend not configured")
@override_settings(
CACHES=caches_setting_for_tests(
base=PyMemcacheCache_params,
exclude=memcached_excluded_caches,
)
)
class PyMemcacheCacheTests(BaseMemcachedTests, TestCase):
base_params = PyMemcacheCache_params
@property
def incr_decr_type_error(self):
return cache._lib.exceptions.MemcacheClientError
def test_pymemcache_highest_pickle_version(self):
self.assertEqual(
cache._cache.default_kwargs["serde"]._serialize_func.keywords[
"pickle_version"
],
pickle.HIGHEST_PROTOCOL,
)
for cache_key in settings.CACHES:
for client_key, client in caches[cache_key]._cache.clients.items():
with self.subTest(cache_key=cache_key, server=client_key):
self.assertEqual(
client.serde._serialize_func.keywords["pickle_version"],
pickle.HIGHEST_PROTOCOL,
)
@override_settings(
CACHES=caches_setting_for_tests(
base=PyMemcacheCache_params,
exclude=memcached_excluded_caches,
OPTIONS={"no_delay": True},
)
)
def test_pymemcache_options(self):
self.assertIs(cache._cache.default_kwargs["no_delay"], True)
@override_settings(
CACHES=caches_setting_for_tests(
BACKEND="django.core.cache.backends.filebased.FileBasedCache",
)
)
class FileBasedCacheTests(BaseCacheTests, TestCase):
"""
Specific test cases for the file-based cache.
"""
def setUp(self):
super().setUp()
self.dirname = self.mkdtemp()
        # The caches' LOCATION cannot be modified through override_settings /
        # modify_settings, so settings are manipulated directly here and the
        # setting_changed signal is triggered manually.
for cache_params in settings.CACHES.values():
cache_params["LOCATION"] = self.dirname
setting_changed.send(self.__class__, setting="CACHES", enter=False)
def tearDown(self):
super().tearDown()
# Call parent first, as cache.clear() may recreate cache base directory
shutil.rmtree(self.dirname)
def mkdtemp(self):
return tempfile.mkdtemp()
def test_ignores_non_cache_files(self):
fname = os.path.join(self.dirname, "not-a-cache-file")
with open(fname, "w"):
os.utime(fname, None)
cache.clear()
self.assertTrue(
os.path.exists(fname), "Expected cache.clear to ignore non cache files"
)
os.remove(fname)
def test_clear_does_not_remove_cache_dir(self):
cache.clear()
self.assertTrue(
os.path.exists(self.dirname), "Expected cache.clear to keep the cache dir"
)
def test_creates_cache_dir_if_nonexistent(self):
os.rmdir(self.dirname)
cache.set("foo", "bar")
self.assertTrue(os.path.exists(self.dirname))
def test_get_ignores_enoent(self):
cache.set("foo", "bar")
os.unlink(cache._key_to_file("foo"))
# Returns the default instead of erroring.
self.assertEqual(cache.get("foo", "baz"), "baz")
@skipIf(
sys.platform == "win32",
"Windows only partially supports umasks and chmod.",
)
def test_cache_dir_permissions(self):
os.rmdir(self.dirname)
dir_path = Path(self.dirname) / "nested" / "filebasedcache"
for cache_params in settings.CACHES.values():
cache_params["LOCATION"] = dir_path
setting_changed.send(self.__class__, setting="CACHES", enter=False)
cache.set("foo", "bar")
self.assertIs(dir_path.exists(), True)
tests = [
dir_path,
dir_path.parent,
dir_path.parent.parent,
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o700)
def test_get_does_not_ignore_non_filenotfound_exceptions(self):
with mock.patch("builtins.open", side_effect=OSError):
with self.assertRaises(OSError):
cache.get("foo")
def test_empty_cache_file_considered_expired(self):
cache_file = cache._key_to_file("foo")
with open(cache_file, "wb") as fh:
fh.write(b"")
with open(cache_file, "rb") as fh:
self.assertIs(cache._is_expired(fh), True)
def test_has_key_race_handling(self):
self.assertIs(cache.add("key", "value"), True)
with mock.patch("builtins.open", side_effect=FileNotFoundError) as mocked_open:
self.assertIs(cache.has_key("key"), False)
mocked_open.assert_called_once()
@unittest.skipUnless(RedisCache_params, "Redis backend not configured")
@override_settings(
CACHES=caches_setting_for_tests(
base=RedisCache_params,
exclude=redis_excluded_caches,
)
)
class RedisCacheTests(BaseCacheTests, TestCase):
def setUp(self):
import redis
super().setUp()
self.lib = redis
@property
def incr_decr_type_error(self):
return self.lib.ResponseError
def test_incr_write_connection(self):
cache.set("number", 42)
with mock.patch(
"django.core.cache.backends.redis.RedisCacheClient.get_client"
) as mocked_get_client:
cache.incr("number")
self.assertEqual(mocked_get_client.call_args.kwargs, {"write": True})
def test_cache_client_class(self):
self.assertIs(cache._class, RedisCacheClient)
self.assertIsInstance(cache._cache, RedisCacheClient)
def test_get_backend_timeout_method(self):
positive_timeout = 10
positive_backend_timeout = cache.get_backend_timeout(positive_timeout)
self.assertEqual(positive_backend_timeout, positive_timeout)
negative_timeout = -5
negative_backend_timeout = cache.get_backend_timeout(negative_timeout)
self.assertEqual(negative_backend_timeout, 0)
none_timeout = None
none_backend_timeout = cache.get_backend_timeout(none_timeout)
self.assertIsNone(none_backend_timeout)
def test_get_connection_pool_index(self):
pool_index = cache._cache._get_connection_pool_index(write=True)
self.assertEqual(pool_index, 0)
pool_index = cache._cache._get_connection_pool_index(write=False)
if len(cache._cache._servers) == 1:
self.assertEqual(pool_index, 0)
else:
self.assertGreater(pool_index, 0)
self.assertLess(pool_index, len(cache._cache._servers))
def test_get_connection_pool(self):
pool = cache._cache._get_connection_pool(write=True)
self.assertIsInstance(pool, self.lib.ConnectionPool)
pool = cache._cache._get_connection_pool(write=False)
self.assertIsInstance(pool, self.lib.ConnectionPool)
def test_get_client(self):
self.assertIsInstance(cache._cache.get_client(), self.lib.Redis)
def test_serializer_dumps(self):
self.assertEqual(cache._cache._serializer.dumps(123), 123)
self.assertIsInstance(cache._cache._serializer.dumps(True), bytes)
self.assertIsInstance(cache._cache._serializer.dumps("abc"), bytes)
@override_settings(
CACHES=caches_setting_for_tests(
base=RedisCache_params,
exclude=redis_excluded_caches,
OPTIONS={
"db": 5,
"socket_timeout": 0.1,
"retry_on_timeout": True,
},
)
)
def test_redis_pool_options(self):
pool = cache._cache._get_connection_pool(write=False)
self.assertEqual(pool.connection_kwargs["db"], 5)
self.assertEqual(pool.connection_kwargs["socket_timeout"], 0.1)
self.assertIs(pool.connection_kwargs["retry_on_timeout"], True)
class FileBasedCachePathLibTests(FileBasedCacheTests):
def mkdtemp(self):
tmp_dir = super().mkdtemp()
return Path(tmp_dir)
@override_settings(
CACHES={
"default": {
"BACKEND": "cache.liberal_backend.CacheClass",
},
}
)
class CustomCacheKeyValidationTests(SimpleTestCase):
"""
    Tests for the ability to mix in a custom ``validate_key`` method to a
    custom cache backend that otherwise inherits from a built-in backend,
    overriding the default key validation. Refs #6447.
"""
def test_custom_key_validation(self):
# this key is both longer than 250 characters, and has spaces
key = "some key with spaces" * 15
val = "a value"
cache.set(key, val)
self.assertEqual(cache.get(key), val)
@override_settings(
CACHES={
"default": {
"BACKEND": "cache.closeable_cache.CacheClass",
}
}
)
class CacheClosingTests(SimpleTestCase):
def test_close(self):
self.assertFalse(cache.closed)
signals.request_finished.send(self.__class__)
self.assertTrue(cache.closed)
def test_close_only_initialized(self):
with self.settings(
CACHES={
"cache_1": {
"BACKEND": "cache.closeable_cache.CacheClass",
},
"cache_2": {
"BACKEND": "cache.closeable_cache.CacheClass",
},
}
):
self.assertEqual(caches.all(initialized_only=True), [])
signals.request_finished.send(self.__class__)
self.assertEqual(caches.all(initialized_only=True), [])
DEFAULT_MEMORY_CACHES_SETTINGS = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "unique-snowflake",
}
}
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS["default"]["TIMEOUT"] = None
class DefaultNonExpiringCacheKeyTests(SimpleTestCase):
"""
    Cache settings with TIMEOUT=None create caches that set non-expiring
    keys.
"""
def setUp(self):
        # The default 5 minute (300 second) expiration time for keys is
        # defined in BaseCache.__init__().
self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout
def tearDown(self):
del self.DEFAULT_TIMEOUT
def test_default_expiration_time_for_keys_is_5_minutes(self):
"""The default expiration time of a cache key is 5 minutes.
This value is defined in
django.core.cache.backends.base.BaseCache.__init__().
"""
self.assertEqual(300, self.DEFAULT_TIMEOUT)
def test_caches_with_unset_timeout_has_correct_default_timeout(self):
"""Caches that have the TIMEOUT parameter undefined in the default
settings will use the default 5 minute timeout.
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout)
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self):
"""Memory caches that have the TIMEOUT parameter set to `None` in the
        default settings will have `None` as the default timeout.
This means "no timeout".
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertIsNone(cache.default_timeout)
self.assertIsNone(cache.get_backend_timeout())
@override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS)
def test_caches_with_unset_timeout_set_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter unset will set cache
keys having the default 5 minute timeout.
"""
key = "my-key"
value = "my-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNotNone(cache._expire_info[cache_key])
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_set_non_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter set to `None` will set
        a non-expiring key by default.
"""
key = "another-key"
value = "another-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNone(cache._expire_info[cache_key])
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="settingsprefix",
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
},
},
USE_I18N=False,
ALLOWED_HOSTS=[".example.com"],
)
class CacheUtils(SimpleTestCase):
"""TestCase for django.utils.cache functions."""
host = "www.example.com"
path = "/cache/test/"
factory = RequestFactory(headers={"host": host})
def tearDown(self):
cache.clear()
def _get_request_cache(self, method="GET", query_string=None, update_cache=None):
request = self._get_request(
self.host, self.path, method, query_string=query_string
)
request._cache_update_cache = update_cache if update_cache else True
return request
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ("Accept-Encoding",), "Accept-Encoding"),
("Accept-Encoding", ("accept-encoding",), "Accept-Encoding"),
("Accept-Encoding", ("ACCEPT-ENCODING",), "Accept-Encoding"),
("Cookie", ("Accept-Encoding",), "Cookie, Accept-Encoding"),
(
"Cookie, Accept-Encoding",
("Accept-Encoding",),
"Cookie, Accept-Encoding",
),
(
"Cookie, Accept-Encoding",
("Accept-Encoding", "cookie"),
"Cookie, Accept-Encoding",
),
(None, ("Accept-Encoding", "COOKIE"), "Accept-Encoding, COOKIE"),
(
"Cookie, Accept-Encoding",
("Accept-Encoding", "cookie"),
"Cookie, Accept-Encoding",
),
(
"Cookie , Accept-Encoding",
("Accept-Encoding", "cookie"),
"Cookie, Accept-Encoding",
),
("*", ("Accept-Language", "Cookie"), "*"),
("Accept-Language, Cookie", ("*",), "*"),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
response = HttpResponse()
if initial_vary is not None:
response.headers["Vary"] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response.headers["Vary"], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
"views.decorators.cache.cache_page.settingsprefix.GET."
"18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e",
)
# A specified key_prefix is taken into account.
key_prefix = "localprefix"
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
"views.decorators.cache.cache_page.localprefix.GET."
"18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e",
)
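    # The key format above follows _generate_cache_key() in
    # django.utils.cache:
    #   views.decorators.cache.cache_page.<key_prefix>.<method>.
    #   <md5 of the absolute URL>.<md5 of the Vary header values>
    # d41d8cd98f00b204e9800998ecf8427e is the MD5 of empty input, since no
    # Vary headers are set here.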
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {"test": 1})
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
"views.decorators.cache.cache_page.settingsprefix.GET."
"beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e",
)
def test_cache_key_varies_by_url(self):
"""
get_cache_key keys differ by fully-qualified URL instead of path
"""
request1 = self.factory.get(self.path, headers={"host": "sub-1.example.com"})
learn_cache_key(request1, HttpResponse())
request2 = self.factory.get(self.path, headers={"host": "sub-2.example.com"})
learn_cache_key(request2, HttpResponse())
self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))
def test_learn_cache_key(self):
request = self.factory.head(self.path)
response = HttpResponse()
response.headers["Vary"] = "Pony"
# Make sure that the Vary header is added to the key hash
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
"views.decorators.cache.cache_page.settingsprefix.GET."
"18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e",
)
def test_patch_cache_control(self):
tests = (
# Initial Cache-Control, kwargs to patch_cache_control, expected
# Cache-Control parts.
(None, {"private": True}, {"private"}),
("", {"private": True}, {"private"}),
# no-cache.
("", {"no_cache": "Set-Cookie"}, {"no-cache=Set-Cookie"}),
("", {"no-cache": "Set-Cookie"}, {"no-cache=Set-Cookie"}),
("no-cache=Set-Cookie", {"no_cache": True}, {"no-cache"}),
("no-cache=Set-Cookie,no-cache=Link", {"no_cache": True}, {"no-cache"}),
(
"no-cache=Set-Cookie",
{"no_cache": "Link"},
{"no-cache=Set-Cookie", "no-cache=Link"},
),
(
"no-cache=Set-Cookie,no-cache=Link",
{"no_cache": "Custom"},
{"no-cache=Set-Cookie", "no-cache=Link", "no-cache=Custom"},
),
# Test whether private/public attributes are mutually exclusive
("private", {"private": True}, {"private"}),
("private", {"public": True}, {"public"}),
("public", {"public": True}, {"public"}),
("public", {"private": True}, {"private"}),
(
"must-revalidate,max-age=60,private",
{"public": True},
{"must-revalidate", "max-age=60", "public"},
),
(
"must-revalidate,max-age=60,public",
{"private": True},
{"must-revalidate", "max-age=60", "private"},
),
(
"must-revalidate,max-age=60",
{"public": True},
{"must-revalidate", "max-age=60", "public"},
),
)
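        # Cache-Control parts are comma-separated, with optional whitespace
        # around each comma.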
cc_delim_re = re.compile(r"\s*,\s*")
for initial_cc, newheaders, expected_cc in tests:
with self.subTest(initial_cc=initial_cc, newheaders=newheaders):
response = HttpResponse()
if initial_cc is not None:
response.headers["Cache-Control"] = initial_cc
patch_cache_control(response, **newheaders)
parts = set(cc_delim_re.split(response.headers["Cache-Control"]))
self.assertEqual(parts, expected_cc)
@override_settings(
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"KEY_PREFIX": "cacheprefix",
},
},
)
class PrefixedCacheUtils(CacheUtils):
pass
@override_settings(
CACHE_MIDDLEWARE_SECONDS=60,
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
},
},
)
class CacheHEADTest(SimpleTestCase):
path = "/cache/test/"
factory = RequestFactory()
def tearDown(self):
cache.clear()
def _set_cache(self, request, msg):
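        # Run the request through UpdateCacheMiddleware with a trivial view so
        # that its response is stored in the cache.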
return UpdateCacheMiddleware(lambda req: HttpResponse(msg))(request)
def test_head_caches_correctly(self):
test_content = "test content"
request = self.factory.head(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
def test_head_with_cached_get(self):
test_content = "test content"
request = self.factory.get(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="settingsprefix",
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
},
},
LANGUAGES=[
("en", "English"),
("es", "Spanish"),
],
)
class CacheI18nTest(SimpleTestCase):
path = "/cache/test/"
factory = RequestFactory()
def tearDown(self):
cache.clear()
@override_settings(USE_I18N=True, USE_TZ=False)
def test_cache_key_i18n_translation(self):
request = self.factory.get(self.path)
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(
lang,
key,
"Cache keys should include the language name when translation is active",
)
key2 = get_cache_key(request)
self.assertEqual(key, key2)
def check_accept_language_vary(self, accept_language, vary, reference_key):
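        # With USE_I18N enabled, learn_cache_key() treats Accept-Language as
        # redundant (the key is already suffixed with the active language), so
        # every Accept-Language value must yield the reference key.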
request = self.factory.get(self.path)
request.META["HTTP_ACCEPT_LANGUAGE"] = accept_language
request.META["HTTP_ACCEPT_ENCODING"] = "gzip;q=1.0, identity; q=0.5, *;q=0"
response = HttpResponse()
response.headers["Vary"] = vary
key = learn_cache_key(request, response)
key2 = get_cache_key(request)
self.assertEqual(key, reference_key)
self.assertEqual(key2, reference_key)
@override_settings(USE_I18N=True, USE_TZ=False)
def test_cache_key_i18n_translation_accept_language(self):
lang = translation.get_language()
self.assertEqual(lang, "en")
request = self.factory.get(self.path)
request.META["HTTP_ACCEPT_ENCODING"] = "gzip;q=1.0, identity; q=0.5, *;q=0"
response = HttpResponse()
response.headers["Vary"] = "accept-encoding"
key = learn_cache_key(request, response)
self.assertIn(
lang,
key,
"Cache keys should include the language name when translation is active",
)
self.check_accept_language_vary(
"en-us", "cookie, accept-language, accept-encoding", key
)
self.check_accept_language_vary(
"en-US", "cookie, accept-encoding, accept-language", key
)
self.check_accept_language_vary(
"en-US,en;q=0.8", "accept-encoding, accept-language, cookie", key
)
self.check_accept_language_vary(
"en-US,en;q=0.8,ko;q=0.6", "accept-language, cookie, accept-encoding", key
)
self.check_accept_language_vary(
"ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ",
"accept-encoding, cookie, accept-language",
key,
)
self.check_accept_language_vary(
"ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4",
"accept-language, accept-encoding, cookie",
key,
)
self.check_accept_language_vary(
"ko;q=1.0,en;q=0.5", "cookie, accept-language, accept-encoding", key
)
self.check_accept_language_vary(
"ko, en", "cookie, accept-encoding, accept-language", key
)
self.check_accept_language_vary(
"ko-KR, en-US", "accept-encoding, accept-language, cookie", key
)
@override_settings(USE_I18N=False, USE_TZ=True)
def test_cache_key_i18n_timezone(self):
request = self.factory.get(self.path)
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(
tz,
key,
"Cache keys should include the time zone name when time zones are active",
)
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False)
def test_cache_key_no_i18n(self):
request = self.factory.get(self.path)
lang = translation.get_language()
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertNotIn(
lang,
key,
"Cache keys shouldn't include the language name when i18n isn't active",
)
self.assertNotIn(
tz,
key,
"Cache keys shouldn't include the time zone name when i18n isn't active",
)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_I18N=True,
)
def test_middleware(self):
def set_cache(request, lang, msg):
def get_response(req):
return HttpResponse(msg)
translation.activate(lang)
return UpdateCacheMiddleware(get_response)(request)
        # Cache with a non-empty request.GET.
request = self.factory.get(self.path, {"foo": "bar", "other": "true"})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
# first access, cache must return None
self.assertIsNone(get_cache_data)
content = "Check for cache with QUERY_STRING"
def get_response(req):
return HttpResponse(content)
UpdateCacheMiddleware(get_response)(request)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
# cache must return content
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
# different QUERY_STRING, cache must be empty
request = self.factory.get(self.path, {"foo": "bar", "somethingelse": "true"})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertIsNone(get_cache_data)
# i18n tests
en_message = "Hello world!"
es_message = "Hola mundo!"
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, "en", en_message)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
# The cache can be recovered
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, en_message.encode())
        # Change the session language and set the content.
        request = self.factory.get(self.path)
        request._cache_update_cache = True
        set_cache(request, "es", es_message)
        # Change the language again.
        translation.activate("en")
        # Retrieve the content from the cache.
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertEqual(get_cache_data.content, en_message.encode())
        # Change the language again.
translation.activate("es")
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertEqual(get_cache_data.content, es_message.encode())
        # Reset the language.
translation.deactivate()
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self.factory.get(self.path)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertIsNone(get_cache_data)
def get_stream_response(req):
return StreamingHttpResponse(["Check for cache with streaming content."])
UpdateCacheMiddleware(get_stream_response)(request)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(
request
)
self.assertIsNone(get_cache_data)
@override_settings(
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"KEY_PREFIX": "cacheprefix",
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
def hello_world_view(request, value):
return HttpResponse("Hello World %s" % value)
def csrf_view(request):
return HttpResponse(csrf(request)["csrf_token"])
@override_settings(
CACHE_MIDDLEWARE_ALIAS="other",
CACHE_MIDDLEWARE_KEY_PREFIX="middlewareprefix",
CACHE_MIDDLEWARE_SECONDS=30,
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
},
"other": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "other",
"TIMEOUT": "1",
},
},
)
class CacheMiddlewareTest(SimpleTestCase):
factory = RequestFactory()
def setUp(self):
self.default_cache = caches["default"]
self.other_cache = caches["other"]
def tearDown(self):
self.default_cache.clear()
self.other_cache.clear()
super().tearDown()
def test_constructor(self):
"""
        The constructor correctly distinguishes between the use of
        CacheMiddleware as middleware and as a view decorator, and sets
        attributes appropriately.
"""
# If only one argument is passed in construction, it's being used as
# middleware.
middleware = CacheMiddleware(empty_response)
# Now test object attributes against values defined in setUp above
self.assertEqual(middleware.cache_timeout, 30)
self.assertEqual(middleware.key_prefix, "middlewareprefix")
self.assertEqual(middleware.cache_alias, "other")
self.assertEqual(middleware.cache, self.other_cache)
# If more arguments are being passed in construction, it's being used
# as a decorator. First, test with "defaults":
as_view_decorator = CacheMiddleware(
empty_response, cache_alias=None, key_prefix=None
)
self.assertEqual(
as_view_decorator.cache_timeout, 30
) # Timeout value for 'default' cache, i.e. 30
self.assertEqual(as_view_decorator.key_prefix, "")
# Value of DEFAULT_CACHE_ALIAS from django.core.cache
self.assertEqual(as_view_decorator.cache_alias, "default")
self.assertEqual(as_view_decorator.cache, self.default_cache)
# Next, test with custom values:
as_view_decorator_with_custom = CacheMiddleware(
hello_world_view, cache_timeout=60, cache_alias="other", key_prefix="foo"
)
self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
self.assertEqual(as_view_decorator_with_custom.key_prefix, "foo")
self.assertEqual(as_view_decorator_with_custom.cache_alias, "other")
self.assertEqual(as_view_decorator_with_custom.cache, self.other_cache)
def test_update_cache_middleware_constructor(self):
middleware = UpdateCacheMiddleware(empty_response)
self.assertEqual(middleware.cache_timeout, 30)
self.assertIsNone(middleware.page_timeout)
self.assertEqual(middleware.key_prefix, "middlewareprefix")
self.assertEqual(middleware.cache_alias, "other")
self.assertEqual(middleware.cache, self.other_cache)
def test_fetch_cache_middleware_constructor(self):
middleware = FetchFromCacheMiddleware(empty_response)
self.assertEqual(middleware.key_prefix, "middlewareprefix")
self.assertEqual(middleware.cache_alias, "other")
self.assertEqual(middleware.cache, self.other_cache)
def test_middleware(self):
middleware = CacheMiddleware(hello_world_view)
prefix_middleware = CacheMiddleware(hello_world_view, key_prefix="prefix1")
timeout_middleware = CacheMiddleware(hello_world_view, cache_timeout=1)
request = self.factory.get("/view/")
# Put the request through the request middleware
result = middleware.process_request(request)
self.assertIsNone(result)
response = hello_world_view(request, "1")
# Now put the response through the response middleware
response = middleware.process_response(request, response)
# Repeating the request should result in a cache hit
result = middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b"Hello World 1")
# The same request through a different middleware won't hit
result = prefix_middleware.process_request(request)
self.assertIsNone(result)
# The same request with a timeout _will_ hit
result = timeout_middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b"Hello World 1")
def test_view_decorator(self):
# decorate the same view with different cache decorators
default_view = cache_page(3)(hello_world_view)
default_with_prefix_view = cache_page(3, key_prefix="prefix1")(hello_world_view)
explicit_default_view = cache_page(3, cache="default")(hello_world_view)
explicit_default_with_prefix_view = cache_page(
3, cache="default", key_prefix="prefix1"
)(hello_world_view)
other_view = cache_page(1, cache="other")(hello_world_view)
other_with_prefix_view = cache_page(1, cache="other", key_prefix="prefix2")(
hello_world_view
)
request = self.factory.get("/view/")
# Request the view once
response = default_view(request, "1")
self.assertEqual(response.content, b"Hello World 1")
# Request again -- hit the cache
response = default_view(request, "2")
self.assertEqual(response.content, b"Hello World 1")
# Requesting the same view with the explicit cache should yield the same result
response = explicit_default_view(request, "3")
self.assertEqual(response.content, b"Hello World 1")
# Requesting with a prefix will hit a different cache key
response = explicit_default_with_prefix_view(request, "4")
self.assertEqual(response.content, b"Hello World 4")
# Hitting the same view again gives a cache hit
response = explicit_default_with_prefix_view(request, "5")
self.assertEqual(response.content, b"Hello World 4")
# And going back to the implicit cache will hit the same cache
response = default_with_prefix_view(request, "6")
self.assertEqual(response.content, b"Hello World 4")
# Requesting from an alternate cache won't hit cache
response = other_view(request, "7")
self.assertEqual(response.content, b"Hello World 7")
# But a repeated hit will hit cache
response = other_view(request, "8")
self.assertEqual(response.content, b"Hello World 7")
# And prefixing the alternate cache yields yet another cache entry
response = other_with_prefix_view(request, "9")
self.assertEqual(response.content, b"Hello World 9")
# But if we wait a couple of seconds...
time.sleep(2)
# ... the default cache will still hit
caches["default"]
response = default_view(request, "11")
self.assertEqual(response.content, b"Hello World 1")
# ... the default cache with a prefix will still hit
response = default_with_prefix_view(request, "12")
self.assertEqual(response.content, b"Hello World 4")
# ... the explicit default cache will still hit
response = explicit_default_view(request, "13")
self.assertEqual(response.content, b"Hello World 1")
# ... the explicit default cache with a prefix will still hit
response = explicit_default_with_prefix_view(request, "14")
self.assertEqual(response.content, b"Hello World 4")
        # ... but a rapidly expiring cache won't hit
response = other_view(request, "15")
self.assertEqual(response.content, b"Hello World 15")
        # ... even if it has a prefix
response = other_with_prefix_view(request, "16")
self.assertEqual(response.content, b"Hello World 16")
def test_cache_page_timeout(self):
        # Page timeout takes precedence over the "max-age" directive of the
        # Cache-Control header.
tests = [
(1, 3), # max_age < page_timeout.
(3, 1), # max_age > page_timeout.
]
for max_age, page_timeout in tests:
with self.subTest(max_age=max_age, page_timeout=page_timeout):
view = cache_page(timeout=page_timeout)(
cache_control(max_age=max_age)(hello_world_view)
)
request = self.factory.get("/view/")
response = view(request, "1")
self.assertEqual(response.content, b"Hello World 1")
time.sleep(1)
response = view(request, "2")
self.assertEqual(
response.content,
b"Hello World 1" if page_timeout > max_age else b"Hello World 2",
)
cache.clear()
def test_cached_control_private_not_cached(self):
"""Responses with 'Cache-Control: private' are not cached."""
view_with_private_cache = cache_page(3)(
cache_control(private=True)(hello_world_view)
)
request = self.factory.get("/view/")
response = view_with_private_cache(request, "1")
self.assertEqual(response.content, b"Hello World 1")
response = view_with_private_cache(request, "2")
self.assertEqual(response.content, b"Hello World 2")
def test_sensitive_cookie_not_cached(self):
"""
Django must prevent caching of responses that set a user-specific (and
maybe security sensitive) cookie in response to a cookie-less request.
"""
request = self.factory.get("/view/")
csrf_middleware = CsrfViewMiddleware(csrf_view)
csrf_middleware.process_view(request, csrf_view, (), {})
cache_middleware = CacheMiddleware(csrf_middleware)
self.assertIsNone(cache_middleware.process_request(request))
cache_middleware(request)
# Inserting a CSRF cookie in a cookie-less request prevented caching.
self.assertIsNone(cache_middleware.process_request(request))
def test_304_response_has_http_caching_headers_but_not_cached(self):
original_view = mock.Mock(return_value=HttpResponseNotModified())
view = cache_page(2)(original_view)
request = self.factory.get("/view/")
# The view shouldn't be cached on the second call.
view(request).close()
response = view(request)
response.close()
self.assertEqual(original_view.call_count, 2)
self.assertIsInstance(response, HttpResponseNotModified)
self.assertIn("Cache-Control", response)
self.assertIn("Expires", response)
def test_per_thread(self):
"""The cache instance is different for each thread."""
thread_caches = []
middleware = CacheMiddleware(empty_response)
def runner():
thread_caches.append(middleware.cache)
for _ in range(2):
thread = threading.Thread(target=runner)
thread.start()
thread.join()
self.assertIsNot(thread_caches[0], thread_caches[1])
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="settingsprefix",
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(SimpleTestCase):
"""
    Tests various headers with TemplateResponse.
    Most are probably redundant since they manipulate the same object
    anyway, but the ETag header is special because it relies on the
    content being complete (which is not necessarily always the case
    with a TemplateResponse).
"""
path = "/cache/test/"
factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ("Accept-Encoding",), "Accept-Encoding"),
("Accept-Encoding", ("accept-encoding",), "Accept-Encoding"),
("Accept-Encoding", ("ACCEPT-ENCODING",), "Accept-Encoding"),
("Cookie", ("Accept-Encoding",), "Cookie, Accept-Encoding"),
(
"Cookie, Accept-Encoding",
("Accept-Encoding",),
"Cookie, Accept-Encoding",
),
(
"Cookie, Accept-Encoding",
("Accept-Encoding", "cookie"),
"Cookie, Accept-Encoding",
),
(None, ("Accept-Encoding", "COOKIE"), "Accept-Encoding, COOKIE"),
            (
                "Cookie,     Accept-Encoding",
                ("Accept-Encoding", "cookie"),
                "Cookie, Accept-Encoding",
            ),
            (
                "Cookie    ,     Accept-Encoding",
                ("Accept-Encoding", "cookie"),
                "Cookie, Accept-Encoding",
            ),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
template = engines["django"].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
if initial_vary is not None:
response.headers["Vary"] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response.headers["Vary"], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
template = engines["django"].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
key_prefix = "localprefix"
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
"views.decorators.cache.cache_page.settingsprefix.GET."
"58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e",
)
# A specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
"views.decorators.cache.cache_page.localprefix.GET."
"58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e",
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {"test": 1})
template = engines["django"].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
"views.decorators.cache.cache_page.settingsprefix.GET."
"0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e",
)
class TestMakeTemplateFragmentKey(SimpleTestCase):
def test_without_vary_on(self):
key = make_template_fragment_key("a.fragment")
self.assertEqual(
key, "template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e"
)
def test_with_one_vary_on(self):
key = make_template_fragment_key("foo", ["abc"])
self.assertEqual(key, "template.cache.foo.493e283d571a73056196f1a68efd0f66")
def test_with_many_vary_on(self):
key = make_template_fragment_key("bar", ["abc", "def"])
self.assertEqual(key, "template.cache.bar.17c1a507a0cb58384f4c639067a93520")
def test_proper_escaping(self):
key = make_template_fragment_key("spam", ["abc:def%"])
self.assertEqual(key, "template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc")
def test_with_ints_vary_on(self):
key = make_template_fragment_key("foo", [1, 2, 3, 4, 5])
self.assertEqual(key, "template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461")
def test_with_unicode_vary_on(self):
key = make_template_fragment_key("foo", ["42º", "😀"])
self.assertEqual(key, "template.cache.foo.7ced1c94e543668590ba39b3c08b0237")
def test_long_vary_on(self):
key = make_template_fragment_key("foo", ["x" * 10000])
self.assertEqual(key, "template.cache.foo.3670b349b5124aa56bdb50678b02b23a")
class CacheHandlerTest(SimpleTestCase):
def test_same_instance(self):
"""
Attempting to retrieve the same alias should yield the same instance.
"""
cache1 = caches["default"]
cache2 = caches["default"]
self.assertIs(cache1, cache2)
def test_per_thread(self):
"""
Requesting the same alias from separate threads should yield separate
instances.
"""
c = []
def runner():
c.append(caches["default"])
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertIsNot(c[0], c[1])
def test_nonexistent_alias(self):
msg = "The connection 'nonexistent' doesn't exist."
with self.assertRaisesMessage(InvalidCacheBackendError, msg):
caches["nonexistent"]
def test_nonexistent_backend(self):
test_caches = CacheHandler(
{
"invalid_backend": {
"BACKEND": "django.nonexistent.NonexistentBackend",
},
}
)
msg = (
"Could not find backend 'django.nonexistent.NonexistentBackend': "
"No module named 'django.nonexistent'"
)
with self.assertRaisesMessage(InvalidCacheBackendError, msg):
test_caches["invalid_backend"]
def test_all(self):
test_caches = CacheHandler(
{
"cache_1": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache",
},
"cache_2": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache",
},
}
)
self.assertEqual(test_caches.all(initialized_only=True), [])
cache_1 = test_caches["cache_1"]
self.assertEqual(test_caches.all(initialized_only=True), [cache_1])
self.assertEqual(len(test_caches.all()), 2)
# .all() initializes all caches.
self.assertEqual(len(test_caches.all(initialized_only=True)), 2)
self.assertEqual(test_caches.all(), test_caches.all(initialized_only=True))
|
d1076f79a3675a640cc040eecd8f5a6a1f6b9ca1734b8a8e8742dda39d69db12 | from datetime import date, datetime, time, timezone, tzinfo
from django.test import SimpleTestCase, override_settings
from django.test.utils import TZ_SUPPORT, requires_tz_support
from django.utils import dateformat, translation
from django.utils.dateformat import format
from django.utils.timezone import get_default_timezone, get_fixed_timezone, make_aware
@override_settings(TIME_ZONE="Europe/Copenhagen")
class DateFormatTests(SimpleTestCase):
def setUp(self):
self._orig_lang = translation.get_language()
translation.activate("en-us")
def tearDown(self):
translation.activate(self._orig_lang)
def test_date(self):
d = date(2009, 5, 16)
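        # "U" formats the date as seconds since the Unix epoch, so the round
        # trip through fromtimestamp() recovers it.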
self.assertEqual(date.fromtimestamp(int(format(d, "U"))), d)
def test_naive_datetime(self):
dt = datetime(2009, 5, 16, 5, 30, 30)
self.assertEqual(datetime.fromtimestamp(int(format(dt, "U"))), dt)
def test_naive_ambiguous_datetime(self):
# dt is ambiguous in Europe/Copenhagen. pytz raises an exception for
# the ambiguity, which results in an empty string.
dt = datetime(2015, 10, 25, 2, 30, 0)
# Try all formatters that involve self.timezone.
self.assertEqual(format(dt, "I"), "")
self.assertEqual(format(dt, "O"), "")
self.assertEqual(format(dt, "T"), "")
self.assertEqual(format(dt, "Z"), "")
@requires_tz_support
def test_datetime_with_local_tzinfo(self):
ltz = get_default_timezone()
dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
self.assertEqual(datetime.fromtimestamp(int(format(dt, "U")), ltz), dt)
self.assertEqual(
datetime.fromtimestamp(int(format(dt, "U"))), dt.replace(tzinfo=None)
)
@requires_tz_support
def test_datetime_with_tzinfo(self):
tz = get_fixed_timezone(-510)
ltz = get_default_timezone()
dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
self.assertEqual(datetime.fromtimestamp(int(format(dt, "U")), tz), dt)
self.assertEqual(datetime.fromtimestamp(int(format(dt, "U")), ltz), dt)
# astimezone() is safe here because the target timezone doesn't have DST
self.assertEqual(
datetime.fromtimestamp(int(format(dt, "U"))),
dt.astimezone(ltz).replace(tzinfo=None),
)
self.assertEqual(
datetime.fromtimestamp(int(format(dt, "U")), tz).timetuple(),
dt.astimezone(tz).timetuple(),
)
self.assertEqual(
datetime.fromtimestamp(int(format(dt, "U")), ltz).timetuple(),
dt.astimezone(ltz).timetuple(),
)
def test_epoch(self):
udt = datetime(1970, 1, 1, tzinfo=timezone.utc)
self.assertEqual(format(udt, "U"), "0")
def test_empty_format(self):
my_birthday = datetime(1979, 7, 8, 22, 00)
self.assertEqual(dateformat.format(my_birthday, ""), "")
def test_am_pm(self):
morning = time(7, 00)
evening = time(19, 00)
self.assertEqual(dateformat.format(morning, "a"), "a.m.")
self.assertEqual(dateformat.format(evening, "a"), "p.m.")
self.assertEqual(dateformat.format(morning, "A"), "AM")
self.assertEqual(dateformat.format(evening, "A"), "PM")
def test_microsecond(self):
# Regression test for #18951
dt = datetime(2009, 5, 16, microsecond=123)
self.assertEqual(dateformat.format(dt, "u"), "000123")
def test_date_formats(self):
# Specifiers 'I', 'r', and 'U' are covered in test_timezones().
my_birthday = datetime(1979, 7, 8, 22, 00)
for specifier, expected in [
("b", "jul"),
("d", "08"),
("D", "Sun"),
("E", "July"),
("F", "July"),
("j", "8"),
("l", "Sunday"),
("L", "False"),
("m", "07"),
("M", "Jul"),
("n", "7"),
("N", "July"),
("o", "1979"),
("S", "th"),
("t", "31"),
("w", "0"),
("W", "27"),
("y", "79"),
("Y", "1979"),
("z", "189"),
]:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.format(my_birthday, specifier), expected)
def test_date_formats_c_format(self):
timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)
self.assertEqual(
dateformat.format(timestamp, "c"), "2008-05-19T11:45:23.123456"
)
def test_time_formats(self):
# Specifiers 'I', 'r', and 'U' are covered in test_timezones().
my_birthday = datetime(1979, 7, 8, 22, 00)
for specifier, expected in [
("a", "p.m."),
("A", "PM"),
("f", "10"),
("g", "10"),
("G", "22"),
("h", "10"),
("H", "22"),
("i", "00"),
("P", "10 p.m."),
("s", "00"),
("u", "000000"),
]:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.format(my_birthday, specifier), expected)
def test_dateformat(self):
my_birthday = datetime(1979, 7, 8, 22, 00)
self.assertEqual(dateformat.format(my_birthday, r"Y z \C\E\T"), "1979 189 CET")
self.assertEqual(dateformat.format(my_birthday, r"jS \o\f F"), "8th of July")
def test_futuredates(self):
the_future = datetime(2100, 10, 25, 0, 00)
self.assertEqual(dateformat.format(the_future, r"Y"), "2100")
def test_day_of_year_leap(self):
self.assertEqual(dateformat.format(datetime(2000, 12, 31), "z"), "366")
def test_timezones(self):
my_birthday = datetime(1979, 7, 8, 22, 00)
summertime = datetime(2005, 10, 30, 1, 00)
wintertime = datetime(2005, 10, 30, 4, 00)
noon = time(12, 0, 0)
# 3h30m to the west of UTC
tz = get_fixed_timezone(-210)
aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)
if TZ_SUPPORT:
for specifier, expected in [
("e", ""),
("O", "+0100"),
("r", "Sun, 08 Jul 1979 22:00:00 +0100"),
("T", "CET"),
("U", "300315600"),
("Z", "3600"),
]:
with self.subTest(specifier=specifier):
self.assertEqual(
dateformat.format(my_birthday, specifier), expected
)
self.assertEqual(dateformat.format(aware_dt, "e"), "-0330")
self.assertEqual(
dateformat.format(aware_dt, "r"),
"Sat, 16 May 2009 05:30:30 -0330",
)
self.assertEqual(dateformat.format(summertime, "I"), "1")
self.assertEqual(dateformat.format(summertime, "O"), "+0200")
self.assertEqual(dateformat.format(wintertime, "I"), "0")
self.assertEqual(dateformat.format(wintertime, "O"), "+0100")
for specifier in ["e", "O", "T", "Z"]:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.time_format(noon, specifier), "")
# Ticket #16924 -- We don't need timezone support to test this
self.assertEqual(dateformat.format(aware_dt, "O"), "-0330")
def test_invalid_time_format_specifiers(self):
my_birthday = date(1984, 8, 7)
for specifier in ["a", "A", "f", "g", "G", "h", "H", "i", "P", "s", "u"]:
with self.subTest(specifier=specifier):
msg = (
"The format for date objects may not contain time-related "
f"format specifiers (found {specifier!r})."
)
with self.assertRaisesMessage(TypeError, msg):
dateformat.format(my_birthday, specifier)
@requires_tz_support
def test_e_format_with_named_time_zone(self):
dt = datetime(1970, 1, 1, tzinfo=timezone.utc)
self.assertEqual(dateformat.format(dt, "e"), "UTC")
@requires_tz_support
def test_e_format_with_time_zone_with_unimplemented_tzname(self):
class NoNameTZ(tzinfo):
"""Time zone without .tzname() defined."""
def utcoffset(self, dt):
return None
dt = datetime(1970, 1, 1, tzinfo=NoNameTZ())
self.assertEqual(dateformat.format(dt, "e"), "")
def test_P_format(self):
for expected, t in [
("midnight", time(0)),
("noon", time(12)),
("4 a.m.", time(4)),
("8:30 a.m.", time(8, 30)),
("4 p.m.", time(16)),
("8:30 p.m.", time(20, 30)),
]:
with self.subTest(time=t):
self.assertEqual(dateformat.time_format(t, "P"), expected)
def test_r_format_with_date(self):
# Assume midnight in default timezone if datetime.date provided.
dt = date(2022, 7, 1)
self.assertEqual(
dateformat.format(dt, "r"),
"Fri, 01 Jul 2022 00:00:00 +0200",
)
def test_r_format_with_non_en_locale(self):
# Changing the locale doesn't change the "r" format.
dt = datetime(1979, 7, 8, 22, 00)
with translation.override("fr"):
self.assertEqual(
dateformat.format(dt, "r"),
"Sun, 08 Jul 1979 22:00:00 +0100",
)
def test_S_format(self):
for expected, days in [
("st", [1, 21, 31]),
("nd", [2, 22]),
("rd", [3, 23]),
("th", (n for n in range(4, 31) if n not in [21, 22, 23])),
]:
for day in days:
dt = date(1970, 1, day)
with self.subTest(day=day):
self.assertEqual(dateformat.format(dt, "S"), expected)
def test_y_format_year_before_1000(self):
tests = [
(476, "76"),
(42, "42"),
(4, "04"),
]
for year, expected_date in tests:
with self.subTest(year=year):
self.assertEqual(
dateformat.format(datetime(year, 9, 8, 5, 0), "y"),
expected_date,
)
def test_Y_format_year_before_1000(self):
self.assertEqual(dateformat.format(datetime(1, 1, 1), "Y"), "0001")
self.assertEqual(dateformat.format(datetime(999, 1, 1), "Y"), "0999")
def test_twelve_hour_format(self):
tests = [
(0, "12", "12"),
(1, "1", "01"),
(11, "11", "11"),
(12, "12", "12"),
(13, "1", "01"),
(23, "11", "11"),
]
for hour, g_expected, h_expected in tests:
dt = datetime(2000, 1, 1, hour)
with self.subTest(hour=hour):
self.assertEqual(dateformat.format(dt, "g"), g_expected)
self.assertEqual(dateformat.format(dt, "h"), h_expected)
|
13a1ec19492da8bf9d9a175876c4d175036cf94900cab025ebd7184f120b9be2 | import datetime
from django.test import TestCase
from django.test.utils import requires_tz_support
from django.utils import timezone, translation
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import npgettext_lazy
class TimesinceTests(TestCase):
def setUp(self):
self.t = datetime.datetime(2007, 8, 14, 13, 46, 0)
self.onemicrosecond = datetime.timedelta(microseconds=1)
self.onesecond = datetime.timedelta(seconds=1)
self.oneminute = datetime.timedelta(minutes=1)
self.onehour = datetime.timedelta(hours=1)
self.oneday = datetime.timedelta(days=1)
self.oneweek = datetime.timedelta(days=7)
self.onemonth = datetime.timedelta(days=31)
self.oneyear = datetime.timedelta(days=366)
def test_equal_datetimes(self):
"""equal datetimes."""
# NOTE: \xa0 avoids wrapping between value and unit
self.assertEqual(timesince(self.t, self.t), "0\xa0minutes")
def test_ignore_microseconds_and_seconds(self):
"""Microseconds and seconds are ignored."""
self.assertEqual(
timesince(self.t, self.t + self.onemicrosecond), "0\xa0minutes"
)
self.assertEqual(timesince(self.t, self.t + self.onesecond), "0\xa0minutes")
def test_other_units(self):
"""Test other units."""
self.assertEqual(timesince(self.t, self.t + self.oneminute), "1\xa0minute")
self.assertEqual(timesince(self.t, self.t + self.onehour), "1\xa0hour")
self.assertEqual(timesince(self.t, self.t + self.oneday), "1\xa0day")
self.assertEqual(timesince(self.t, self.t + self.oneweek), "1\xa0week")
self.assertEqual(timesince(self.t, self.t + self.onemonth), "1\xa0month")
self.assertEqual(timesince(self.t, self.t + self.oneyear), "1\xa0year")
def test_multiple_units(self):
"""Test multiple units."""
self.assertEqual(
timesince(self.t, self.t + 2 * self.oneday + 6 * self.onehour),
"2\xa0days, 6\xa0hours",
)
self.assertEqual(
timesince(self.t, self.t + 2 * self.oneweek + 2 * self.oneday),
"2\xa0weeks, 2\xa0days",
)
def test_display_first_unit(self):
"""
If the two differing units aren't adjacent, only the first unit is
displayed.
"""
self.assertEqual(
timesince(
self.t,
self.t + 2 * self.oneweek + 3 * self.onehour + 4 * self.oneminute,
),
"2\xa0weeks",
)
self.assertEqual(
timesince(self.t, self.t + 4 * self.oneday + 5 * self.oneminute),
"4\xa0days",
)
def test_display_second_before_first(self):
"""
When the second date occurs before the first, we should always
get 0 minutes.
"""
self.assertEqual(
timesince(self.t, self.t - self.onemicrosecond), "0\xa0minutes"
)
self.assertEqual(timesince(self.t, self.t - self.onesecond), "0\xa0minutes")
self.assertEqual(timesince(self.t, self.t - self.oneminute), "0\xa0minutes")
self.assertEqual(timesince(self.t, self.t - self.onehour), "0\xa0minutes")
self.assertEqual(timesince(self.t, self.t - self.oneday), "0\xa0minutes")
self.assertEqual(timesince(self.t, self.t - self.oneweek), "0\xa0minutes")
self.assertEqual(timesince(self.t, self.t - self.onemonth), "0\xa0minutes")
self.assertEqual(timesince(self.t, self.t - self.oneyear), "0\xa0minutes")
self.assertEqual(
timesince(self.t, self.t - 2 * self.oneday - 6 * self.onehour),
"0\xa0minutes",
)
self.assertEqual(
timesince(self.t, self.t - 2 * self.oneweek - 2 * self.oneday),
"0\xa0minutes",
)
self.assertEqual(
timesince(
self.t,
self.t - 2 * self.oneweek - 3 * self.onehour - 4 * self.oneminute,
),
"0\xa0minutes",
)
self.assertEqual(
timesince(self.t, self.t - 4 * self.oneday - 5 * self.oneminute),
"0\xa0minutes",
)
def test_second_before_equal_first_humanize_time_strings(self):
time_strings = {
"minute": npgettext_lazy(
"naturaltime-future",
"%(num)d minute",
"%(num)d minutes",
"num",
),
}
with translation.override("cs"):
for now in [self.t, self.t - self.onemicrosecond, self.t - self.oneday]:
with self.subTest(now):
self.assertEqual(
timesince(self.t, now, time_strings=time_strings),
"0\xa0minut",
)
@requires_tz_support
def test_different_timezones(self):
"""When using two different timezones."""
now = datetime.datetime.now()
now_tz = timezone.make_aware(now, timezone.get_default_timezone())
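        # The same instant converted to a fixed +03:15 zone (195 minutes east
        # of UTC).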
now_tz_i = timezone.localtime(now_tz, timezone.get_fixed_timezone(195))
self.assertEqual(timesince(now), "0\xa0minutes")
self.assertEqual(timesince(now_tz), "0\xa0minutes")
self.assertEqual(timesince(now_tz_i), "0\xa0minutes")
self.assertEqual(timesince(now_tz, now_tz_i), "0\xa0minutes")
self.assertEqual(timeuntil(now), "0\xa0minutes")
self.assertEqual(timeuntil(now_tz), "0\xa0minutes")
self.assertEqual(timeuntil(now_tz_i), "0\xa0minutes")
self.assertEqual(timeuntil(now_tz, now_tz_i), "0\xa0minutes")
def test_date_objects(self):
"""Both timesince and timeuntil should work on date objects (#17937)."""
today = datetime.date.today()
self.assertEqual(timesince(today + self.oneday), "0\xa0minutes")
self.assertEqual(timeuntil(today - self.oneday), "0\xa0minutes")
def test_both_date_objects(self):
"""Timesince should work with both date objects (#9672)"""
today = datetime.date.today()
self.assertEqual(timeuntil(today + self.oneday, today), "1\xa0day")
self.assertEqual(timeuntil(today - self.oneday, today), "0\xa0minutes")
self.assertEqual(timeuntil(today + self.oneweek, today), "1\xa0week")
def test_leap_year(self):
start_date = datetime.date(2016, 12, 25)
self.assertEqual(timeuntil(start_date + self.oneweek, start_date), "1\xa0week")
self.assertEqual(timesince(start_date, start_date + self.oneweek), "1\xa0week")
def test_leap_year_new_years_eve(self):
t = datetime.date(2016, 12, 31)
now = datetime.datetime(2016, 12, 31, 18, 0, 0)
self.assertEqual(timesince(t + self.oneday, now), "0\xa0minutes")
self.assertEqual(timeuntil(t - self.oneday, now), "0\xa0minutes")
def test_naive_datetime_with_tzinfo_attribute(self):
class naive(datetime.tzinfo):
def utcoffset(self, dt):
return None
future = datetime.datetime(2080, 1, 1, tzinfo=naive())
self.assertEqual(timesince(future), "0\xa0minutes")
past = datetime.datetime(1980, 1, 1, tzinfo=naive())
self.assertEqual(timeuntil(past), "0\xa0minutes")
def test_thousand_years_ago(self):
t = datetime.datetime(1007, 8, 14, 13, 46, 0)
self.assertEqual(timesince(t, self.t), "1000\xa0years")
self.assertEqual(timeuntil(self.t, t), "1000\xa0years")
def test_depth(self):
t = (
self.t
+ self.oneyear
+ self.onemonth
+ self.oneweek
+ self.oneday
+ self.onehour
)
tests = [
(t, 1, "1\xa0year"),
(t, 2, "1\xa0year, 1\xa0month"),
(t, 3, "1\xa0year, 1\xa0month, 1\xa0week"),
(t, 4, "1\xa0year, 1\xa0month, 1\xa0week, 1\xa0day"),
(t, 5, "1\xa0year, 1\xa0month, 1\xa0week, 1\xa0day, 1\xa0hour"),
(t, 6, "1\xa0year, 1\xa0month, 1\xa0week, 1\xa0day, 1\xa0hour"),
(self.t + self.onehour, 5, "1\xa0hour"),
(self.t + (4 * self.oneminute), 3, "4\xa0minutes"),
(self.t + self.onehour + self.oneminute, 1, "1\xa0hour"),
(self.t + self.oneday + self.onehour, 1, "1\xa0day"),
(self.t + self.oneweek + self.oneday, 1, "1\xa0week"),
(self.t + self.onemonth + self.oneweek, 1, "1\xa0month"),
(self.t + self.oneyear + self.onemonth, 1, "1\xa0year"),
(self.t + self.oneyear + self.oneweek + self.oneday, 3, "1\xa0year"),
]
for value, depth, expected in tests:
with self.subTest():
self.assertEqual(timesince(self.t, value, depth=depth), expected)
self.assertEqual(timeuntil(value, self.t, depth=depth), expected)
def test_months_edge(self):
t = datetime.datetime(2022, 1, 1)
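        # timesince() is calendar-aware for months: the first of each month is
        # an exact number of months after Jan 1, while the last day of the
        # preceding month still rounds down to weeks.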
tests = [
(datetime.datetime(2022, 1, 31), "4\xa0weeks, 2\xa0days"),
(datetime.datetime(2022, 2, 1), "1\xa0month"),
(datetime.datetime(2022, 2, 28), "1\xa0month, 3\xa0weeks"),
(datetime.datetime(2022, 3, 1), "2\xa0months"),
(datetime.datetime(2022, 3, 31), "2\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 4, 1), "3\xa0months"),
(datetime.datetime(2022, 4, 30), "3\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 5, 1), "4\xa0months"),
(datetime.datetime(2022, 5, 31), "4\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 6, 1), "5\xa0months"),
(datetime.datetime(2022, 6, 30), "5\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 7, 1), "6\xa0months"),
(datetime.datetime(2022, 7, 31), "6\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 8, 1), "7\xa0months"),
(datetime.datetime(2022, 8, 31), "7\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 9, 1), "8\xa0months"),
(datetime.datetime(2022, 9, 30), "8\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 10, 1), "9\xa0months"),
(datetime.datetime(2022, 10, 31), "9\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 11, 1), "10\xa0months"),
(datetime.datetime(2022, 11, 30), "10\xa0months, 4\xa0weeks"),
(datetime.datetime(2022, 12, 1), "11\xa0months"),
(datetime.datetime(2022, 12, 31), "11\xa0months, 4\xa0weeks"),
]
for value, expected in tests:
with self.subTest():
self.assertEqual(timesince(t, value), expected)
def test_depth_invalid(self):
msg = "depth must be greater than 0."
with self.assertRaisesMessage(ValueError, msg):
timesince(self.t, self.t, depth=0)
|
3e865fb2715f4a01c3d58d329a16cb6379fa10286c02bfe68503be624f0e39f7 | import platform
import unittest
from datetime import datetime, timezone
from unittest import mock
from django.test import SimpleTestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import (
base36_to_int,
content_disposition_header,
escape_leading_slashes,
http_date,
int_to_base36,
is_same_domain,
parse_etags,
parse_header_parameters,
parse_http_date,
quote_etag,
url_has_allowed_host_and_scheme,
urlencode,
urlsafe_base64_decode,
urlsafe_base64_encode,
)
class URLEncodeTests(SimpleTestCase):
cannot_encode_none_msg = (
"Cannot encode None for key 'a' in a query string. Did you mean to "
"pass an empty string or omit the value?"
)
def test_tuples(self):
self.assertEqual(urlencode((("a", 1), ("b", 2), ("c", 3))), "a=1&b=2&c=3")
def test_dict(self):
result = urlencode({"a": 1, "b": 2, "c": 3})
# Dictionaries are treated as unordered.
self.assertIn(
result,
[
"a=1&b=2&c=3",
"a=1&c=3&b=2",
"b=2&a=1&c=3",
"b=2&c=3&a=1",
"c=3&a=1&b=2",
"c=3&b=2&a=1",
],
)
def test_dict_containing_sequence_not_doseq(self):
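        # Without doseq, the sequence is stringified ("[1, 2]") and
        # percent-encoded as a single value.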
self.assertEqual(urlencode({"a": [1, 2]}, doseq=False), "a=%5B1%2C+2%5D")
def test_dict_containing_tuple_not_doseq(self):
self.assertEqual(urlencode({"a": (1, 2)}, doseq=False), "a=%281%2C+2%29")
def test_custom_iterable_not_doseq(self):
class IterableWithStr:
def __str__(self):
return "custom"
def __iter__(self):
yield from range(0, 3)
self.assertEqual(urlencode({"a": IterableWithStr()}, doseq=False), "a=custom")
def test_dict_containing_sequence_doseq(self):
self.assertEqual(urlencode({"a": [1, 2]}, doseq=True), "a=1&a=2")
def test_dict_containing_empty_sequence_doseq(self):
self.assertEqual(urlencode({"a": []}, doseq=True), "")
def test_multivaluedict(self):
result = urlencode(
MultiValueDict(
{
"name": ["Adrian", "Simon"],
"position": ["Developer"],
}
),
doseq=True,
)
# MultiValueDicts are similarly unordered.
self.assertIn(
result,
[
"name=Adrian&name=Simon&position=Developer",
"position=Developer&name=Adrian&name=Simon",
],
)
def test_dict_with_bytes_values(self):
self.assertEqual(urlencode({"a": b"abc"}, doseq=True), "a=abc")
def test_dict_with_sequence_of_bytes(self):
self.assertEqual(
urlencode({"a": [b"spam", b"eggs", b"bacon"]}, doseq=True),
"a=spam&a=eggs&a=bacon",
)
def test_dict_with_bytearray(self):
self.assertEqual(urlencode({"a": bytearray(range(2))}, doseq=True), "a=0&a=1")
def test_generator(self):
self.assertEqual(urlencode({"a": range(2)}, doseq=True), "a=0&a=1")
self.assertEqual(urlencode({"a": range(2)}, doseq=False), "a=range%280%2C+2%29")
def test_none(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({"a": None})
def test_none_in_sequence(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({"a": [None]}, doseq=True)
def test_none_in_generator(self):
def gen():
yield None
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({"a": gen()}, doseq=True)
class Base36IntTests(SimpleTestCase):
def test_roundtrip(self):
for n in [0, 1, 1000, 1000000]:
self.assertEqual(n, base36_to_int(int_to_base36(n)))
def test_negative_input(self):
with self.assertRaisesMessage(ValueError, "Negative base36 conversion input."):
int_to_base36(-1)
def test_to_base36_errors(self):
for n in ["1", "foo", {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
int_to_base36(n)
def test_invalid_literal(self):
for n in ["#", " "]:
with self.assertRaisesMessage(
ValueError, "invalid literal for int() with base 36: '%s'" % n
):
base36_to_int(n)
def test_input_too_large(self):
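        # base36_to_int() rejects inputs longer than 13 characters, which
        # bounds the size of the converted integer.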
with self.assertRaisesMessage(ValueError, "Base36 input too large"):
base36_to_int("1" * 14)
def test_to_int_errors(self):
for n in [123, {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
base36_to_int(n)
def test_values(self):
for n, b36 in [(0, "0"), (1, "1"), (42, "16"), (818469960, "django")]:
self.assertEqual(int_to_base36(n), b36)
self.assertEqual(base36_to_int(b36), n)
class URLHasAllowedHostAndSchemeTests(unittest.TestCase):
def test_bad_urls(self):
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
r"\\example.com",
r"\\\example.com",
r"/\\/example.com",
r"\\\example.com",
r"\\example.com",
r"\\//example.com",
r"/\/example.com",
r"\/example.com",
r"/\example.com",
"http:///example.com",
r"http:/\//example.com",
r"http:\/example.com",
r"http:/\example.com",
'javascript:alert("XSS")',
"\njavascript:alert(x)",
"java\nscript:alert(x)",
"\x08//example.com",
r"http://otherserver\@example.com",
r"http:\\testserver\@example.com",
r"http://testserver\me:[email protected]",
r"http://testserver\@example.com",
r"http:\\testserver\confirm\[email protected]",
"http:999999999",
"ftp:9999999999",
"\n",
"http://[2001:cdba:0000:0000:0000:0000:3257:9652/",
"http://2001:cdba:0000:0000:0000:0000:3257:9652]/",
)
for bad_url in bad_urls:
with self.subTest(url=bad_url):
self.assertIs(
url_has_allowed_host_and_scheme(
bad_url, allowed_hosts={"testserver", "testserver2"}
),
False,
)
def test_good_urls(self):
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"http://testserver/[email protected]",
"/url%20with%20spaces/",
"path/http:2222222222",
)
for good_url in good_urls:
with self.subTest(url=good_url):
self.assertIs(
url_has_allowed_host_and_scheme(
good_url, allowed_hosts={"otherserver", "testserver"}
),
True,
)
def test_basic_auth(self):
# Valid basic auth credentials are allowed.
self.assertIs(
url_has_allowed_host_and_scheme(
r"http://user:pass@testserver/", allowed_hosts={"user:pass@testserver"}
),
True,
)
def test_no_allowed_hosts(self):
# A path without host is allowed.
self.assertIs(
url_has_allowed_host_and_scheme(
"/confirm/[email protected]", allowed_hosts=None
),
True,
)
# Basic auth without host is not allowed.
self.assertIs(
url_has_allowed_host_and_scheme(
r"http://testserver\@example.com", allowed_hosts=None
),
False,
)
def test_allowed_hosts_str(self):
self.assertIs(
url_has_allowed_host_and_scheme(
"http://good.com/good", allowed_hosts="good.com"
),
True,
)
self.assertIs(
url_has_allowed_host_and_scheme(
"http://good.co/evil", allowed_hosts="good.com"
),
False,
)
def test_secure_param_https_urls(self):
secure_urls = (
"https://example.com/p",
"HTTPS://example.com/p",
"/view/?param=http://example.com",
)
for url in secure_urls:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(
url, allowed_hosts={"example.com"}, require_https=True
),
True,
)
def test_secure_param_non_https_urls(self):
insecure_urls = (
"http://example.com/p",
"ftp://example.com/p",
"//example.com/p",
)
for url in insecure_urls:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(
url, allowed_hosts={"example.com"}, require_https=True
),
False,
)
class URLSafeBase64Tests(unittest.TestCase):
def test_roundtrip(self):
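        # urlsafe_base64_encode() strips the trailing "=" padding; the decoder
        # restores it before decoding.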
bytestring = b"foo"
encoded = urlsafe_base64_encode(bytestring)
decoded = urlsafe_base64_decode(encoded)
self.assertEqual(bytestring, decoded)
class IsSameDomainTests(unittest.TestCase):
def test_good(self):
for pair in (
("example.com", "example.com"),
("example.com", ".example.com"),
("foo.example.com", ".example.com"),
("example.com:8888", "example.com:8888"),
("example.com:8888", ".example.com:8888"),
("foo.example.com:8888", ".example.com:8888"),
):
self.assertIs(is_same_domain(*pair), True)
def test_bad(self):
for pair in (
("example2.com", "example.com"),
("foo.example.com", "example.com"),
("example.com:9999", "example.com:8888"),
("foo.example.com:8888", ""),
):
self.assertIs(is_same_domain(*pair), False)
class ETagProcessingTests(unittest.TestCase):
def test_parsing(self):
self.assertEqual(
parse_etags(r'"" , "etag", "e\\tag", W/"weak"'),
['""', '"etag"', r'"e\\tag"', 'W/"weak"'],
)
self.assertEqual(parse_etags("*"), ["*"])
# Ignore RFC 2616 ETags that are invalid according to RFC 9110.
self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"'])
def test_quoting(self):
self.assertEqual(quote_etag("etag"), '"etag"') # unquoted
self.assertEqual(quote_etag('"etag"'), '"etag"') # quoted
self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"') # quoted, weak
class HttpDateProcessingTests(unittest.TestCase):
def test_http_date(self):
t = 1167616461.0
self.assertEqual(http_date(t), "Mon, 01 Jan 2007 01:54:21 GMT")
def test_parsing_rfc1123(self):
parsed = parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT")
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
)
@unittest.skipIf(platform.architecture()[0] == "32bit", "The Year 2038 problem.")
@mock.patch("django.utils.http.datetime.datetime")
def test_parsing_rfc850(self, mocked_datetime):
mocked_datetime.side_effect = datetime
mocked_datetime.now = mock.Mock()
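        # side_effect=datetime lets the patched class still construct real
        # datetime instances, while now() returns the value set per test case.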
now_1 = datetime(2019, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
now_2 = datetime(2020, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
now_3 = datetime(2048, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
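        # RFC 850 dates carry two-digit years; values that would land more
        # than 50 years in the future relative to "now" are interpreted as
        # dates in the past (hence the three reference "now" values).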
tests = (
(
now_1,
"Tuesday, 31-Dec-69 08:49:37 GMT",
datetime(2069, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
),
(
now_1,
"Tuesday, 10-Nov-70 08:49:37 GMT",
datetime(1970, 11, 10, 8, 49, 37, tzinfo=timezone.utc),
),
(
now_1,
"Sunday, 06-Nov-94 08:49:37 GMT",
datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
),
(
now_2,
"Wednesday, 31-Dec-70 08:49:37 GMT",
datetime(2070, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
),
(
now_2,
"Friday, 31-Dec-71 08:49:37 GMT",
datetime(1971, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
),
(
now_3,
"Sunday, 31-Dec-00 08:49:37 GMT",
datetime(2000, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
),
(
now_3,
"Friday, 31-Dec-99 08:49:37 GMT",
datetime(1999, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
),
)
for now, rfc850str, expected_date in tests:
with self.subTest(rfc850str=rfc850str):
mocked_datetime.now.return_value = now
parsed = parse_http_date(rfc850str)
mocked_datetime.now.assert_called_once_with(tz=timezone.utc)
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
expected_date,
)
mocked_datetime.reset_mock()
def test_parsing_asctime(self):
        parsed = parse_http_date("Sun Nov  6 08:49:37 1994")
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
)
    def test_parsing_asctime_nonascii_digits(self):
        """Non-ASCII unicode decimals raise an error."""
        # The original non-ASCII digit characters did not survive encoding
        # normalization; fullwidth digits stand in here as representative
        # non-ASCII decimals, which parse_http_date() must reject.
        with self.assertRaises(ValueError):
            parse_http_date("Sun Nov ６ 08:49:37 1994")
        with self.assertRaises(ValueError):
            parse_http_date("Sun Nov １２ 08:49:37 1994")
def test_parsing_year_less_than_70(self):
parsed = parse_http_date("Sun Nov 6 08:49:37 0037")
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
datetime(2037, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
)
class EscapeLeadingSlashesTests(unittest.TestCase):
def test(self):
tests = (
("//example.com", "/%2Fexample.com"),
("//", "/%2F"),
)
for url, expected in tests:
with self.subTest(url=url):
self.assertEqual(escape_leading_slashes(url), expected)
class ParseHeaderParameterTests(unittest.TestCase):
def test_basic(self):
tests = [
("text/plain", ("text/plain", {})),
("text/vnd.just.made.this.up ; ", ("text/vnd.just.made.this.up", {})),
("text/plain;charset=us-ascii", ("text/plain", {"charset": "us-ascii"})),
(
'text/plain ; charset="us-ascii"',
("text/plain", {"charset": "us-ascii"}),
),
(
'text/plain ; charset="us-ascii"; another=opt',
("text/plain", {"charset": "us-ascii", "another": "opt"}),
),
(
'attachment; filename="silly.txt"',
("attachment", {"filename": "silly.txt"}),
),
(
'attachment; filename="strange;name"',
("attachment", {"filename": "strange;name"}),
),
(
'attachment; filename="strange;name";size=123;',
("attachment", {"filename": "strange;name", "size": "123"}),
),
(
'form-data; name="files"; filename="fo\\"o;bar"',
("form-data", {"name": "files", "filename": 'fo"o;bar'}),
),
]
for header, expected in tests:
with self.subTest(header=header):
self.assertEqual(parse_header_parameters(header), expected)
def test_rfc2231_parsing(self):
test_data = (
(
"Content-Type: application/x-stuff; "
"title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A",
"This is ***fun***",
),
(
"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html",
"foo-ä.html",
),
(
"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html",
"foo-ä.html",
),
)
for raw_line, expected_title in test_data:
parsed = parse_header_parameters(raw_line)
self.assertEqual(parsed[1]["title"], expected_title)
def test_rfc2231_wrong_title(self):
"""
Test wrongly formatted RFC 2231 headers (missing double single quotes).
Parsing should not crash (#24209).
"""
test_data = (
(
"Content-Type: application/x-stuff; "
"title*='This%20is%20%2A%2A%2Afun%2A%2A%2A",
"'This%20is%20%2A%2A%2Afun%2A%2A%2A",
),
("Content-Type: application/x-stuff; title*='foo.html", "'foo.html"),
("Content-Type: application/x-stuff; title*=bar.html", "bar.html"),
)
for raw_line, expected_title in test_data:
parsed = parse_header_parameters(raw_line)
self.assertEqual(parsed[1]["title"], expected_title)
class ContentDispositionHeaderTests(unittest.TestCase):
def test_basic(self):
tests = (
((False, None), None),
((False, "example"), 'inline; filename="example"'),
((True, None), "attachment"),
((True, "example"), 'attachment; filename="example"'),
(
(True, '"example" file\\name'),
'attachment; filename="\\"example\\" file\\\\name"',
),
((True, "espécimen"), "attachment; filename*=utf-8''esp%C3%A9cimen"),
(
(True, '"espécimen" filename'),
"attachment; filename*=utf-8''%22esp%C3%A9cimen%22%20filename",
),
)
for (is_attachment, filename), expected in tests:
with self.subTest(is_attachment=is_attachment, filename=filename):
self.assertEqual(
content_disposition_header(is_attachment, filename), expected
)
|
f85713f495cc79486965c37e816d563e2e15e280bf196aa981137d47e86e79ef | import datetime
import pickle
import sys
import unittest
from operator import attrgetter
from threading import Lock
from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models import CharField, Count, Exists, F, Max, OuterRef, Q
from django.db.models.expressions import RawSQL
from django.db.models.functions import ExtractYear, Length, LTrim
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.where import AND, OR, NothingNode, WhereNode
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext, ignore_warnings, register_lookup
from django.utils.deprecation import RemovedInDjango50Warning
from .models import (
FK1,
Annotation,
Article,
Author,
BaseA,
BaseUser,
Book,
CategoryItem,
CategoryRelationship,
Celebrity,
Channel,
Chapter,
Child,
ChildObjectA,
Classroom,
CommonMixedCaseForeignKeys,
Company,
Cover,
CustomPk,
CustomPkTag,
DateTimePK,
Detail,
DumbCategory,
Eaten,
Employment,
ExtraInfo,
Fan,
Food,
Identifier,
Individual,
Item,
Job,
JobResponsibilities,
Join,
LeafA,
LeafB,
LoopX,
LoopZ,
ManagedModel,
Member,
MixedCaseDbColumnCategoryItem,
MixedCaseFieldCategoryItem,
ModelA,
ModelB,
ModelC,
ModelD,
MyObject,
NamedCategory,
Node,
Note,
NullableName,
Number,
ObjectA,
ObjectB,
ObjectC,
OneToOneCategory,
Order,
OrderItem,
Page,
Paragraph,
Person,
Plaything,
PointerA,
Program,
ProxyCategory,
ProxyObjectA,
ProxyObjectB,
Ranking,
Related,
RelatedIndividual,
RelatedObject,
Report,
ReportComment,
ReservedName,
Responsibility,
School,
SharedConnection,
SimpleCategory,
SingleObject,
SpecialCategory,
Staff,
StaffUser,
Student,
Tag,
Task,
Teacher,
Ticket21203Child,
Ticket21203Parent,
Ticket23605A,
Ticket23605B,
Ticket23605C,
TvChef,
Valid,
X,
)
class Queries1Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.nc1 = generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
cls.t2 = Tag.objects.create(name="t2", parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name="t3", parent=cls.t1)
cls.t4 = Tag.objects.create(name="t4", parent=cls.t3)
cls.t5 = Tag.objects.create(name="t5", parent=cls.t3)
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.n2 = Note.objects.create(note="n2", misc="bar", id=2)
cls.n3 = Note.objects.create(note="n3", misc="foo", id=3, negate=False)
cls.ann1 = Annotation.objects.create(name="a1", tag=cls.t1)
cls.ann1.notes.add(cls.n1)
ann2 = Annotation.objects.create(name="a2", tag=cls.t4)
ann2.notes.add(cls.n2, cls.n3)
        # Create these out of order so that sorting by 'id' will be different
        # from sorting by 'info'. Helps detect some problems later.
cls.e2 = ExtraInfo.objects.create(
info="e2", note=cls.n2, value=41, filterable=False
)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1, value=42)
cls.a1 = Author.objects.create(name="a1", num=1001, extra=e1)
cls.a2 = Author.objects.create(name="a2", num=2002, extra=e1)
cls.a3 = Author.objects.create(name="a3", num=3003, extra=cls.e2)
cls.a4 = Author.objects.create(name="a4", num=4004, extra=cls.e2)
cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
cls.i1 = Item.objects.create(
name="one",
created=cls.time1,
modified=cls.time1,
creator=cls.a1,
note=cls.n3,
)
cls.i1.tags.set([cls.t1, cls.t2])
cls.i2 = Item.objects.create(
name="two", created=cls.time2, creator=cls.a2, note=cls.n2
)
cls.i2.tags.set([cls.t1, cls.t3])
cls.i3 = Item.objects.create(
name="three", created=time3, creator=cls.a2, note=cls.n3
)
cls.i4 = Item.objects.create(
name="four", created=time4, creator=cls.a4, note=cls.n3
)
cls.i4.tags.set([cls.t4])
cls.r1 = Report.objects.create(name="r1", creator=cls.a1)
cls.r2 = Report.objects.create(name="r2", creator=cls.a3)
cls.r3 = Report.objects.create(name="r3")
        # Only one Ranking is needed here; Queries5Tests exercises the full
        # ordering behavior of the Ranking model.
cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2)
cls.c1 = Cover.objects.create(title="first", item=cls.i4)
cls.c2 = Cover.objects.create(title="second", item=cls.i2)
def test_subquery_condition(self):
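        # Subquery aliases are allocated alphabetically (T, U, V, ...) and
        # reused when possible instead of growing without bound.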
qs1 = Tag.objects.filter(pk__lte=0)
qs2 = Tag.objects.filter(parent__in=qs1)
qs3 = Tag.objects.filter(parent__in=qs2)
self.assertEqual(qs3.query.subq_aliases, {"T", "U", "V"})
self.assertIn("v0", str(qs3.query).lower())
qs4 = qs3.filter(parent__in=qs1)
self.assertEqual(qs4.query.subq_aliases, {"T", "U", "V"})
# It is possible to reuse U for the second subquery, no need to use W.
self.assertNotIn("w0", str(qs4.query).lower())
# So, 'U0."id"' is referenced in SELECT and WHERE twice.
self.assertEqual(str(qs4.query).lower().count("u0."), 4)
def test_ticket1050(self):
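        # tags__isnull=True matches items with no tags at all (#1050).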
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=True),
[self.i3],
)
self.assertSequenceEqual(
Item.objects.filter(tags__id__isnull=True),
[self.i3],
)
def test_ticket1801(self):
self.assertSequenceEqual(
Author.objects.filter(item=self.i2),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i3),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
[self.a2],
)
def test_ticket2306(self):
# Checking that no join types are "left outer" joins.
query = Item.objects.filter(tags=self.t2).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).order_by("name"),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(
Q(creator__name="fred") | Q(tags=self.t2)
),
[self.i1],
)
# Each filter call is processed "at once" against a single table, so this is
# different from the previous example as it tries to find tags that are two
# things at once (rather than two tags).
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)), []
)
self.assertSequenceEqual(
Item.objects.filter(
Q(tags=self.t1), Q(creator__name="fred") | Q(tags=self.t2)
),
[],
)
qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
self.assertSequenceEqual(list(qs), [self.a2])
        self.assertEqual(qs.query.count_active_tables(), 2)
qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
self.assertEqual(qs.query.count_active_tables(), 3)
def test_ticket4464(self):
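        # Each chained filter() call joins the m2m table again, so the two
        # conditions may be satisfied by different tags (#4464).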
self.assertSequenceEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2])
.distinct()
.order_by("name"),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
[self.i2],
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by("name")[:3],
[self.i1, self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2])
.distinct()
.order_by("name")[:3],
[self.i1, self.i2],
)
def test_tickets_2080_3592(self):
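        # ORing two querysets is equivalent to filtering on ORed Q objects
        # (#2080, #3592).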
self.assertSequenceEqual(
Author.objects.filter(item__name="one") | Author.objects.filter(name="a3"),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name="one") | Q(name="a3")),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(name="a3") | Q(item__name="one")),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name="three") | Q(report__name="r3")),
[self.a2],
)
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[]) | Q(id__in=[])), [])
def test_tickets_1878_2939(self):
self.assertEqual(Item.objects.values("creator").distinct().count(), 3)
# Create something with a duplicate 'name' so that we can test multi-column
# cases (which require some tricky SQL transformations under the covers).
xx = Item(name="four", created=self.time1, creator=self.a2, note=self.n1)
xx.save()
self.assertEqual(
Item.objects.exclude(name="two")
.values("creator", "name")
.distinct()
.count(),
4,
)
self.assertEqual(
(
Item.objects.exclude(name="two")
.extra(select={"foo": "%s"}, select_params=(1,))
.values("creator", "name", "foo")
.distinct()
.count()
),
4,
)
self.assertEqual(
(
Item.objects.exclude(name="two")
.extra(select={"foo": "%s"}, select_params=(1,))
.values("creator", "name")
.distinct()
.count()
),
4,
)
xx.delete()
def test_ticket7323(self):
self.assertEqual(Item.objects.values("creator", "name").count(), 4)
def test_ticket2253(self):
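        # Querysets can be combined with the | and & operators, ORing and
        # ANDing their conditions (#2253).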
q1 = Item.objects.order_by("name")
q2 = Item.objects.filter(id=self.i1.id)
self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2])
self.assertSequenceEqual(q2, [self.i1])
self.assertSequenceEqual(
(q1 | q2).order_by("name"),
[self.i4, self.i1, self.i3, self.i2],
)
self.assertSequenceEqual((q1 & q2).order_by("name"), [self.i1])
q1 = Item.objects.filter(tags=self.t1)
q2 = Item.objects.filter(note=self.n3, tags=self.t2)
q3 = Item.objects.filter(creator=self.a4)
self.assertSequenceEqual(
((q1 & q2) | q3).order_by("name"),
[self.i4, self.i1],
)
def test_order_by_tables(self):
q1 = Item.objects.order_by("name")
q2 = Item.objects.filter(id=self.i1.id)
list(q2)
combined_query = (q1 & q2).order_by("name").query
self.assertEqual(
len(
[
t
for t in combined_query.alias_map
if combined_query.alias_refcount[t]
]
),
1,
)
    def test_order_by_join_unref(self):
        """
        Related to test_order_by_tables: changing the ordering must not
        leave stale JOINs from the old ordering in the query.
        """
qs = Celebrity.objects.order_by("greatest_fan__fan_of")
self.assertIn("OUTER JOIN", str(qs.query))
qs = qs.order_by("id")
self.assertNotIn("OUTER JOIN", str(qs.query))
def test_order_by_related_field_transform(self):
extra_12 = ExtraInfo.objects.create(
info="extra 12",
date=DateTimePK.objects.create(date=datetime.datetime(2021, 12, 10)),
)
extra_11 = ExtraInfo.objects.create(
info="extra 11",
date=DateTimePK.objects.create(date=datetime.datetime(2022, 11, 10)),
)
self.assertSequenceEqual(
ExtraInfo.objects.filter(date__isnull=False).order_by("date__month"),
[extra_11, extra_12],
)
def test_filter_by_related_field_transform(self):
extra_old = ExtraInfo.objects.create(
info="extra 12",
date=DateTimePK.objects.create(date=datetime.datetime(2020, 12, 10)),
)
ExtraInfo.objects.create(info="extra 11", date=DateTimePK.objects.create())
a5 = Author.objects.create(name="a5", num=5005, extra=extra_old)
fk_field = ExtraInfo._meta.get_field("date")
with register_lookup(fk_field, ExtractYear):
self.assertSequenceEqual(
ExtraInfo.objects.filter(date__year=2020),
[extra_old],
)
self.assertSequenceEqual(
Author.objects.filter(extra__date__year=2020), [a5]
)
def test_filter_by_related_field_nested_transforms(self):
extra = ExtraInfo.objects.create(info=" extra")
a5 = Author.objects.create(name="a5", num=5005, extra=extra)
info_field = ExtraInfo._meta.get_field("info")
with register_lookup(info_field, Length), register_lookup(CharField, LTrim):
self.assertSequenceEqual(
Author.objects.filter(extra__info__ltrim__length=5), [a5]
)
def test_get_clears_ordering(self):
"""
get() should clear ordering for optimization purposes.
"""
with CaptureQueriesContext(connection) as captured_queries:
Author.objects.order_by("name").get(pk=self.a1.pk)
self.assertNotIn("order by", captured_queries[0]["sql"].lower())
def test_tickets_4088_4306(self):
self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1])
self.assertSequenceEqual(Report.objects.filter(creator__num=1001), [self.r1])
self.assertSequenceEqual(Report.objects.filter(creator__id=1001), [])
self.assertSequenceEqual(
Report.objects.filter(creator__id=self.a1.id), [self.r1]
)
self.assertSequenceEqual(Report.objects.filter(creator__name="a1"), [self.r1])
def test_ticket4510(self):
self.assertSequenceEqual(
Author.objects.filter(report__name="r1"),
[self.a1],
)
def test_ticket7378(self):
self.assertSequenceEqual(self.a1.report_set.all(), [self.r1])
def test_tickets_5324_6704(self):
self.assertSequenceEqual(
Item.objects.filter(tags__name="t4"),
[self.i4],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t4").order_by("name").distinct(),
[self.i1, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t4").order_by("name").distinct().reverse(),
[self.i2, self.i3, self.i1],
)
self.assertSequenceEqual(
Author.objects.exclude(item__name="one").distinct().order_by("name"),
[self.a2, self.a3, self.a4],
)
# Excluding across a m2m relation when there is more than one related
# object associated was problematic.
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1").order_by("name"),
[self.i4, self.i3],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1").exclude(tags__name="t4"),
[self.i3],
)
# Excluding from a relation that cannot be NULL should not use outer joins.
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
# Similarly, when one of the joins cannot possibly, ever, involve NULL
# values (Author -> ExtraInfo, in the following), it should never be
# promoted to a left outer join. So the following query should only
# involve one "left outer" join (Author -> Item is 0-to-many).
qs = Author.objects.filter(id=self.a1.id).filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
)
self.assertEqual(
len(
[
x
for x in qs.query.alias_map.values()
if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias]
]
),
1,
)
# The previous changes shouldn't affect nullable foreign key joins.
self.assertSequenceEqual(
Tag.objects.filter(parent__isnull=True).order_by("name"), [self.t1]
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__isnull=True).order_by("name"),
[self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__name="t1") | Q(parent__isnull=True)).order_by(
"name"
),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name="t1")).order_by(
"name"
),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by("name"),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by("name"),
[self.t4, self.t5],
)
def test_ticket2091(self):
t = Tag.objects.get(name="t4")
self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4])
def test_avoid_infinite_loop_on_too_many_subqueries(self):
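        # Deeply nested __in subqueries should raise RecursionError instead
        # of looping forever while assigning subquery aliases.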
x = Tag.objects.filter(pk=1)
local_recursion_limit = sys.getrecursionlimit() // 16
msg = "Maximum recursion depth exceeded: too many subqueries."
with self.assertRaisesMessage(RecursionError, msg):
            for _ in range(local_recursion_limit + 2):
x = Tag.objects.filter(pk__in=x)
def test_reasonable_number_of_subq_aliases(self):
x = Tag.objects.filter(pk=1)
for _ in range(20):
x = Tag.objects.filter(pk__in=x)
self.assertEqual(
x.query.subq_aliases,
{
"T",
"U",
"V",
"W",
"X",
"Y",
"Z",
"AA",
"AB",
"AC",
"AD",
"AE",
"AF",
"AG",
"AH",
"AI",
"AJ",
"AK",
"AL",
"AM",
"AN",
},
)
def test_heterogeneous_qs_combination(self):
# Combining querysets built on different models should behave in a well-defined
# fashion. We raise an error.
msg = "Cannot combine queries on two different base models."
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() & Tag.objects.all()
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() | Tag.objects.all()
def test_ticket3141(self):
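        # Columns added via extra(select=...) don't affect count() (#3141).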
self.assertEqual(Author.objects.extra(select={"foo": "1"}).count(), 4)
self.assertEqual(
Author.objects.extra(select={"foo": "%s"}, select_params=(1,)).count(), 4
)
def test_ticket2400(self):
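        # A reverse relation with isnull=True finds objects that have no
        # related rows (#2400).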
self.assertSequenceEqual(
Author.objects.filter(item__isnull=True),
[self.a3],
)
self.assertSequenceEqual(
Tag.objects.filter(item__isnull=True),
[self.t5],
)
def test_ticket2496(self):
self.assertSequenceEqual(
Item.objects.extra(tables=["queries_author"])
.select_related()
.order_by("name")[:1],
[self.i4],
)
def test_error_raised_on_filter_with_dictionary(self):
with self.assertRaisesMessage(FieldError, "Cannot parse keyword query as dict"):
Note.objects.filter({"note": "n1", "misc": "foo"})
def test_tickets_2076_7256(self):
# Ordering on related tables should be possible, even if the table is
# not otherwise involved.
self.assertSequenceEqual(
Item.objects.order_by("note__note", "name"),
[self.i2, self.i4, self.i1, self.i3],
)
# Ordering on a related field should use the remote model's default
# ordering as a final step.
self.assertSequenceEqual(
Author.objects.order_by("extra", "-name"),
[self.a2, self.a1, self.a4, self.a3],
)
# Using remote model default ordering can span multiple models (in this
# case, Cover is ordered by Item's default, which uses Note's default).
self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2])
# If the remote model does not have a default ordering, we order by its 'id'
# field.
self.assertSequenceEqual(
Item.objects.order_by("creator", "name"),
[self.i1, self.i3, self.i2, self.i4],
)
# Ordering by a many-valued attribute (e.g. a many-to-many or reverse
# ForeignKey) is legal, but the results might not make sense. That
# isn't Django's problem. Garbage in, garbage out.
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=False).order_by("tags", "id"),
[self.i1, self.i2, self.i1, self.i2, self.i4],
)
# If we replace the default ordering, Django adjusts the required
# tables automatically. Item normally requires a join with Note to do
# the default ordering, but that isn't needed here.
qs = Item.objects.order_by("name")
self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2])
self.assertEqual(len(qs.query.alias_map), 1)
def test_tickets_2874_3002(self):
qs = Item.objects.select_related().order_by("note__note", "name")
self.assertQuerySetEqual(qs, [self.i2, self.i4, self.i1, self.i3])
# This is also a good select_related() test because there are multiple
# Note entries in the SQL. The two Note items should be different.
self.assertEqual(repr(qs[0].note), "<Note: n2>")
self.assertEqual(repr(qs[0].creator.extra.note), "<Note: n1>")
def test_ticket3037(self):
self.assertSequenceEqual(
Item.objects.filter(
Q(creator__name="a3", name="two") | Q(creator__name="a4", name="four")
),
[self.i4],
)
def test_tickets_5321_7070(self):
# Ordering columns must be included in the output columns. Note that
# this means results that might otherwise be distinct are not (if there
# are multiple values in the ordering cols), as in this example. This
# isn't a bug; it's a warning to be careful with the selection of
# ordering columns.
self.assertSequenceEqual(
Note.objects.values("misc").distinct().order_by("note", "-misc"),
[{"misc": "foo"}, {"misc": "bar"}, {"misc": "foo"}],
)
def test_ticket4358(self):
# If you don't pass any fields to values(), relation fields are
# returned as "foo_id" keys, not "foo". For consistency, you should be
# able to pass "foo_id" in the fields list and have it work, too. We
# actually allow both "foo" and "foo_id".
# The *_id version is returned by default.
self.assertIn("note_id", ExtraInfo.objects.values()[0])
# You can also pass it in explicitly.
self.assertSequenceEqual(
ExtraInfo.objects.values("note_id"), [{"note_id": 1}, {"note_id": 2}]
)
# ...or use the field name.
self.assertSequenceEqual(
ExtraInfo.objects.values("note"), [{"note": 1}, {"note": 2}]
)
def test_ticket6154(self):
        # Multiple filter statements are always joined using AND.
self.assertSequenceEqual(
Author.objects.filter(id=self.a1.id).filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
),
[self.a1],
)
self.assertSequenceEqual(
Author.objects.filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
).filter(id=self.a1.id),
[self.a1],
)
def test_ticket6981(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_ticket9926(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent", "category").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.select_related("parent", "parent__category").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_tickets_6180_6203(self):
# Dates with limits and/or counts
self.assertEqual(Item.objects.count(), 4)
self.assertEqual(Item.objects.datetimes("created", "month").count(), 1)
self.assertEqual(Item.objects.datetimes("created", "day").count(), 2)
self.assertEqual(len(Item.objects.datetimes("created", "day")), 2)
self.assertEqual(
Item.objects.datetimes("created", "day")[0],
datetime.datetime(2007, 12, 19, 0, 0),
)
def test_tickets_7087_12242(self):
# Dates with extra select columns
self.assertSequenceEqual(
Item.objects.datetimes("created", "day").extra(select={"a": 1}),
[
datetime.datetime(2007, 12, 19, 0, 0),
datetime.datetime(2007, 12, 20, 0, 0),
],
)
self.assertSequenceEqual(
Item.objects.extra(select={"a": 1}).datetimes("created", "day"),
[
datetime.datetime(2007, 12, 19, 0, 0),
datetime.datetime(2007, 12, 20, 0, 0),
],
)
name = "one"
self.assertSequenceEqual(
Item.objects.datetimes("created", "day").extra(
where=["name=%s"], params=[name]
),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
self.assertSequenceEqual(
Item.objects.extra(where=["name=%s"], params=[name]).datetimes(
"created", "day"
),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_ticket7155(self):
# Nullable dates
self.assertSequenceEqual(
Item.objects.datetimes("modified", "day"),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_order_by_rawsql(self):
self.assertSequenceEqual(
Item.objects.values("note__note").order_by(
RawSQL("queries_note.note", ()),
"id",
),
[
{"note__note": "n2"},
{"note__note": "n3"},
{"note__note": "n3"},
{"note__note": "n3"},
],
)
def test_ticket7096(self):
# Make sure exclude() with multiple conditions continues to work.
self.assertSequenceEqual(
Tag.objects.filter(parent=self.t1, name="t3").order_by("name"),
[self.t3],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent=self.t1, name="t3").order_by("name"),
[self.t1, self.t2, self.t4, self.t5],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1", name="one")
.order_by("name")
.distinct(),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(name__in=["three", "four"])
.exclude(tags__name="t1")
.order_by("name"),
[self.i4, self.i3],
)
# More twisted cases, involving nested negations.
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name="t1", name="one")),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(~Q(tags__name="t1", name="one"), name="two"),
[self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name="t1", name="one"), name="two"),
[self.i4, self.i1, self.i3],
)
def test_tickets_7204_7506(self):
# Make sure querysets with related fields can be pickled. If this
# doesn't crash, it's a Good Thing.
pickle.dumps(Item.objects.all())
def test_ticket7813(self):
# We should also be able to pickle things that use select_related().
# The only tricky thing here is to ensure that we do the related
# selections properly after unpickling.
qs = Item.objects.select_related()
query = qs.query.get_compiler(qs.db).as_sql()[0]
query2 = pickle.loads(pickle.dumps(qs.query))
self.assertEqual(query2.get_compiler(qs.db).as_sql()[0], query)
def test_deferred_load_qs_pickling(self):
# Check pickling of deferred-loading querysets
qs = Item.objects.defer("name", "creator")
q2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(list(qs), list(q2))
q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL))
self.assertEqual(list(qs), list(q3))
def test_ticket7277(self):
self.assertSequenceEqual(
self.n1.annotation_set.filter(
Q(tag=self.t5)
| Q(tag__children=self.t5)
| Q(tag__children__children=self.t5)
),
[self.ann1],
)
def test_tickets_7448_7707(self):
# Complex objects should be converted to strings before being used in
# lookups.
self.assertSequenceEqual(
Item.objects.filter(created__in=[self.time1, self.time2]),
[self.i1, self.i2],
)
def test_ticket7235(self):
# An EmptyQuerySet should not raise exceptions if it is filtered.
Eaten.objects.create(meal="m")
q = Eaten.objects.none()
with self.assertNumQueries(0):
self.assertSequenceEqual(q.all(), [])
self.assertSequenceEqual(q.filter(meal="m"), [])
self.assertSequenceEqual(q.exclude(meal="m"), [])
self.assertSequenceEqual(q.complex_filter({"pk": 1}), [])
self.assertSequenceEqual(q.select_related("food"), [])
self.assertSequenceEqual(q.annotate(Count("food")), [])
self.assertSequenceEqual(q.order_by("meal", "food"), [])
self.assertSequenceEqual(q.distinct(), [])
self.assertSequenceEqual(q.extra(select={"foo": "1"}), [])
self.assertSequenceEqual(q.reverse(), [])
q.query.low_mark = 1
msg = "Cannot change a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
q.extra(select={"foo": "1"})
self.assertSequenceEqual(q.defer("meal"), [])
self.assertSequenceEqual(q.only("meal"), [])
def test_ticket7791(self):
# There were "issues" when ordering and distinct-ing on fields related
# via ForeignKeys.
self.assertEqual(len(Note.objects.order_by("extrainfo__info").distinct()), 3)
# Pickling of QuerySets using datetimes() should work.
qs = Item.objects.datetimes("created", "month")
pickle.loads(pickle.dumps(qs))
def test_ticket9997(self):
# If a ValuesList or Values queryset is passed as an inner query, we
# make sure it's only requesting a single value and use that as the
# thing to select.
self.assertSequenceEqual(
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values("name")
),
[self.t2, self.t3],
)
# Multi-valued values() and values_list() querysets should raise errors.
with self.assertRaisesMessage(
TypeError, "Cannot use multi-field values as a filter value."
):
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values("name", "id")
)
with self.assertRaisesMessage(
TypeError, "Cannot use multi-field values as a filter value."
):
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values_list("name", "id")
)
def test_ticket9985(self):
# qs.values_list(...).values(...) combinations should work.
self.assertSequenceEqual(
Note.objects.values_list("note", flat=True).values("id").order_by("id"),
[{"id": 1}, {"id": 2}, {"id": 3}],
)
self.assertSequenceEqual(
Annotation.objects.filter(
notes__in=Note.objects.filter(note="n1")
.values_list("note")
.values("id")
),
[self.ann1],
)
def test_ticket10205(self):
# When bailing out early because of an empty "__in" filter, we need
# to set things up correctly internally so that subqueries can continue
# properly.
self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0)
def test_ticket10432(self):
# Testing an empty "__in" filter with a generator as the value.
def f():
return iter([])
n_obj = Note.objects.all()[0]
def g():
yield n_obj.pk
self.assertSequenceEqual(Note.objects.filter(pk__in=f()), [])
self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj])
def test_ticket10742(self):
# Queries used in an __in clause don't execute subqueries
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.filter(pk__in=subq)
self.assertSequenceEqual(qs, [self.a1, self.a2])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.exclude(pk__in=subq)
self.assertSequenceEqual(qs, [self.a3, self.a4])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
self.assertSequenceEqual(
Author.objects.filter(Q(pk__in=subq) & Q(name="a1")),
[self.a1],
)
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
def test_ticket7076(self):
# Excluding shouldn't eliminate NULL entries.
self.assertSequenceEqual(
Item.objects.exclude(modified=self.time1).order_by("name"),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__name=self.t1.name),
[self.t1, self.t4, self.t5],
)
def test_ticket7181(self):
        # Ordering by related tables should accommodate nullable fields. This
        # test is a little tricky, since NULL ordering is database-dependent;
        # instead, we just count the number of results.
self.assertEqual(len(Tag.objects.order_by("parent__name")), 5)
# Empty querysets can be merged with others.
self.assertSequenceEqual(
Note.objects.none() | Note.objects.all(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.all() | Note.objects.none(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), [])
self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), [])
def test_ticket8439(self):
# Complex combinations of conjunctions, disjunctions and nullable
# relations.
self.assertSequenceEqual(
Author.objects.filter(
Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name="xyz")
),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(
Q(report=self.r1, name="xyz") | Q(item__note__extrainfo=self.e2)
),
[self.a2],
)
self.assertSequenceEqual(
Annotation.objects.filter(
Q(tag__parent=self.t1) | Q(notes__note="n1", name="a1")
),
[self.ann1],
)
xx = ExtraInfo.objects.create(info="xx", note=self.n3)
self.assertSequenceEqual(
Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)),
[self.n1, self.n3],
)
q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query
self.assertEqual(
len(
[
x
for x in q.alias_map.values()
if x.join_type == LOUTER and q.alias_refcount[x.table_alias]
]
),
1,
)
    def test_ticket17429(self):
        """
        Meta.ordering=None works the same as Meta.ordering=[].
        """
original_ordering = Tag._meta.ordering
Tag._meta.ordering = None
try:
self.assertCountEqual(
Tag.objects.all(),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
finally:
Tag._meta.ordering = original_ordering
def test_exclude(self):
self.assertQuerySetEqual(
Item.objects.exclude(tags__name="t4"),
Item.objects.filter(~Q(tags__name="t4")),
)
self.assertQuerySetEqual(
Item.objects.exclude(Q(tags__name="t4") | Q(tags__name="t3")),
Item.objects.filter(~(Q(tags__name="t4") | Q(tags__name="t3"))),
)
self.assertQuerySetEqual(
Item.objects.exclude(Q(tags__name="t4") | ~Q(tags__name="t3")),
Item.objects.filter(~(Q(tags__name="t4") | ~Q(tags__name="t3"))),
)
def test_nested_exclude(self):
self.assertQuerySetEqual(
Item.objects.exclude(~Q(tags__name="t4")),
Item.objects.filter(~~Q(tags__name="t4")),
)
def test_double_exclude(self):
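        # Double negation (~~Q) is a no-op and matches the plain lookup.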
self.assertQuerySetEqual(
Item.objects.filter(Q(tags__name="t4")),
Item.objects.filter(~~Q(tags__name="t4")),
)
self.assertQuerySetEqual(
Item.objects.filter(Q(tags__name="t4")),
Item.objects.filter(~Q(~Q(tags__name="t4"))),
)
def test_exclude_in(self):
self.assertQuerySetEqual(
Item.objects.exclude(Q(tags__name__in=["t4", "t3"])),
Item.objects.filter(~Q(tags__name__in=["t4", "t3"])),
)
self.assertQuerySetEqual(
Item.objects.filter(Q(tags__name__in=["t4", "t3"])),
Item.objects.filter(~~Q(tags__name__in=["t4", "t3"])),
)
def test_ticket_10790_1(self):
        # Querying direct fields with isnull should trim the left outer join.
        # It also shouldn't create an INNER JOIN.
q = Tag.objects.filter(parent__isnull=True)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.filter(parent__isnull=False)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__isnull=True)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__isnull=False)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertNotIn("INNER JOIN", str(q.query))
def test_ticket_10790_2(self):
# Querying across several tables should strip only the last outer join,
# while preserving the preceding inner joins.
q = Tag.objects.filter(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
# Querying without isnull should not convert anything to left outer join.
q = Tag.objects.filter(parent__parent=self.t1)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
def test_ticket_10790_3(self):
# Querying via indirect fields should populate the left outer join
q = NamedCategory.objects.filter(tag__isnull=True)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
        # Join to the DumbCategory ptr_id.
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
self.assertSequenceEqual(q, [])
# Querying across several tables should strip only the last join, while
# preserving the preceding left outer joins.
q = NamedCategory.objects.filter(tag__parent__isnull=True)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertSequenceEqual(q, [self.nc1])
def test_ticket_10790_4(self):
        # Querying across an m2m field should not strip the m2m table from
        # the join.
q = Author.objects.filter(item__tags__isnull=True)
self.assertSequenceEqual(q, [self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 2)
self.assertNotIn("INNER JOIN", str(q.query))
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 3)
self.assertNotIn("INNER JOIN", str(q.query))
def test_ticket_10790_5(self):
        # Querying with isnull=False across an m2m field should not create
        # outer joins.
q = Author.objects.filter(item__tags__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 2)
q = Author.objects.filter(item__tags__parent__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 3)
q = Author.objects.filter(item__tags__parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 4)
def test_ticket_10790_6(self):
        # Querying with isnull=True across an m2m field should not create
        # inner joins and should strip the last outer join.
q = Author.objects.filter(item__tags__parent__parent__isnull=True)
self.assertSequenceEqual(
q,
[self.a1, self.a1, self.a2, self.a2, self.a2, self.a3],
)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 4)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 3)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
def test_ticket_10790_7(self):
# Reverse querying with isnull should not strip the join
q = Author.objects.filter(item__isnull=True)
self.assertSequenceEqual(q, [self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
q = Author.objects.filter(item__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
def test_ticket_10790_8(self):
        # Querying with combined Q objects should also strip the left outer
        # join.
q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1))
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
def test_ticket_10790_combine(self):
# Combining queries should not re-populate the left outer join
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__isnull=False)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q1 & q2
self.assertSequenceEqual(q3, [])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q2 = Tag.objects.filter(parent=self.t1)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__parent__isnull=True)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
def test_ticket19672(self):
self.assertSequenceEqual(
Report.objects.filter(
Q(creator__isnull=False) & ~Q(creator__extra__value=41)
),
[self.r1],
)
def test_ticket_20250(self):
# A negated Q along with an annotated queryset failed in Django 1.4
qs = Author.objects.annotate(Count("item"))
qs = qs.filter(~Q(extra__value=0)).order_by("name")
self.assertIn("SELECT", str(qs.query))
self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4])
def test_lookup_constraint_fielderror(self):
msg = (
"Cannot resolve keyword 'unknown_field' into field. Choices are: "
"annotation, category, category_id, children, id, item, "
"managedmodel, name, note, parent, parent_id"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(unknown_field__name="generic")
    def test_common_mixed_case_foreign_keys(self):
        """
        A valid query should be generated when fields fetched from joined
        tables include FKs whose names only differ by case.
        """
c1 = SimpleCategory.objects.create(name="c1")
c2 = SimpleCategory.objects.create(name="c2")
c3 = SimpleCategory.objects.create(name="c3")
category = CategoryItem.objects.create(category=c1)
mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(
CaTeGoRy=c2
)
mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(
category=c3
)
CommonMixedCaseForeignKeys.objects.create(
category=category,
mixed_case_field_category=mixed_case_field_category,
mixed_case_db_column_category=mixed_case_db_column_category,
)
qs = CommonMixedCaseForeignKeys.objects.values(
"category",
"mixed_case_field_category",
"mixed_case_db_column_category",
"category__category",
"mixed_case_field_category__CaTeGoRy",
"mixed_case_db_column_category__category",
)
self.assertTrue(qs.first())
def test_excluded_intermediary_m2m_table_joined(self):
self.assertSequenceEqual(
Note.objects.filter(~Q(tag__annotation__name=F("note"))),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.filter(tag__annotation__name="a1").filter(
~Q(tag__annotation__name=F("note"))
),
[],
)
def test_field_with_filterable(self):
self.assertSequenceEqual(
Author.objects.filter(extra=self.e2),
[self.a3, self.a4],
)
def test_negate_field(self):
self.assertSequenceEqual(
Note.objects.filter(negate=True),
[self.n1, self.n2],
)
self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3])
class Queries2Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.num4 = Number.objects.create(num=4)
cls.num8 = Number.objects.create(num=8)
cls.num12 = Number.objects.create(num=12)
def test_ticket4289(self):
        # A slight variation on restricting the filtering choices by the
        # lookup constraints.
self.assertSequenceEqual(Number.objects.filter(num__lt=4), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), [])
self.assertSequenceEqual(
Number.objects.filter(num__gt=8, num__lt=13),
[self.num12],
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)),
[self.num8],
)
def test_ticket12239(self):
# Custom lookups are registered to round float values correctly on gte
# and lt IntegerField queries.
self.assertSequenceEqual(
Number.objects.filter(num__gt=11.9),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gt=12), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), [])
self.assertCountEqual(
Number.objects.filter(num__lt=12),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.0),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=11.9),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12.0),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), [])
self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), [])
self.assertCountEqual(
Number.objects.filter(num__lte=11.9),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.0),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.9),
[self.num4, self.num8, self.num12],
)
def test_ticket7759(self):
# Count should work with a partially read result set.
count = Number.objects.count()
qs = Number.objects.all()
def run():
for obj in qs:
return qs.count() == count
self.assertTrue(run())
class Queries3Tests(TestCase):
def test_ticket7107(self):
# This shouldn't create an infinite loop.
self.assertSequenceEqual(Valid.objects.all(), [])
def test_datetimes_invalid_field(self):
# An error should be raised when QuerySet.datetimes() is passed the
# wrong type of field.
msg = "'name' isn't a DateField, TimeField, or DateTimeField."
with self.assertRaisesMessage(TypeError, msg):
Item.objects.datetimes("name", "month")
def test_ticket22023(self):
with self.assertRaisesMessage(
TypeError, "Cannot call only() after .values() or .values_list()"
):
Valid.objects.values().only()
with self.assertRaisesMessage(
TypeError, "Cannot call defer() after .values() or .values_list()"
):
Valid.objects.values().defer()
class Queries4Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
n1 = Note.objects.create(note="n1", misc="foo")
n2 = Note.objects.create(note="n2", misc="bar")
e1 = ExtraInfo.objects.create(info="e1", note=n1)
e2 = ExtraInfo.objects.create(info="e2", note=n2)
cls.a1 = Author.objects.create(name="a1", num=1001, extra=e1)
cls.a3 = Author.objects.create(name="a3", num=3003, extra=e2)
cls.r1 = Report.objects.create(name="r1", creator=cls.a1)
cls.r2 = Report.objects.create(name="r2", creator=cls.a3)
cls.r3 = Report.objects.create(name="r3")
cls.i1 = Item.objects.create(
name="i1", created=datetime.datetime.now(), note=n1, creator=cls.a1
)
cls.i2 = Item.objects.create(
name="i2", created=datetime.datetime.now(), note=n1, creator=cls.a3
)
def test_ticket24525(self):
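        # ANDing a reverse m2m queryset with an exclude() over the same
        # relation produces the expected intersection (#24525).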
tag = Tag.objects.create()
anth100 = tag.note_set.create(note="ANTH", misc="100")
math101 = tag.note_set.create(note="MATH", misc="101")
s1 = tag.annotation_set.create(name="1")
s2 = tag.annotation_set.create(name="2")
s1.notes.set([math101, anth100])
s2.notes.set([math101])
result = math101.annotation_set.all() & tag.annotation_set.exclude(
notes__in=[anth100]
)
self.assertEqual(list(result), [s2])
def test_ticket11811(self):
unsaved_category = NamedCategory(name="Other")
msg = (
"Unsaved model instance <NamedCategory: Other> cannot be used in an ORM "
"query."
)
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category)
def test_ticket14876(self):
# Note: when combining the query we need to have information available
# about the join type of the trimmed "creator__isnull" join. If we
# don't have that information, then the join is created as INNER JOIN
# and results will be incorrect.
q1 = Report.objects.filter(
Q(creator__isnull=True) | Q(creator__extra__info="e1")
)
q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(
Q(creator__extra__info="e1")
)
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Report.objects.filter(
Q(creator__extra__info="e1") | Q(creator__isnull=True)
)
q2 = Report.objects.filter(
Q(creator__extra__info="e1")
) | Report.objects.filter(Q(creator__isnull=True))
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(
Q(creator=self.a1) | Q(creator__report__name="r1")
).order_by()
q2 = (
Item.objects.filter(Q(creator=self.a1)).order_by()
| Item.objects.filter(Q(creator__report__name="r1")).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(
Q(creator__report__name="e1") | Q(creator=self.a1)
).order_by()
q2 = (
Item.objects.filter(Q(creator__report__name="e1")).order_by()
| Item.objects.filter(Q(creator=self.a1)).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
def test_combine_join_reuse(self):
# Joins having identical connections are correctly recreated in the
# rhs query, in case the query is ORed together (#18748).
Report.objects.create(name="r4", creator=self.a1)
q1 = Author.objects.filter(report__name="r5")
q2 = Author.objects.filter(report__name="r4").filter(report__name="r1")
combined = q1 | q2
self.assertEqual(str(combined.query).count("JOIN"), 2)
self.assertEqual(len(combined), 1)
self.assertEqual(combined[0].name, "a1")
def test_combine_or_filter_reuse(self):
combined = Author.objects.filter(name="a1") | Author.objects.filter(name="a3")
self.assertEqual(combined.get(name="a1"), self.a1)
def test_join_reuse_order(self):
        # Join aliases are reused in order. This shouldn't raise an
        # AssertionError when change_map contains a circular reference
        # (#26522).
s1 = School.objects.create()
s2 = School.objects.create()
s3 = School.objects.create()
t1 = Teacher.objects.create()
otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1)
qs1 = otherteachers.filter(schools=s1).filter(schools=s2)
qs2 = otherteachers.filter(schools=s1).filter(schools=s3)
self.assertSequenceEqual(qs1 | qs2, [])
def test_ticket7095(self):
# Updates that are filtered on the model being updated are somewhat
# tricky in MySQL.
ManagedModel.objects.create(data="mm1", tag=self.t1, public=True)
self.assertEqual(ManagedModel.objects.update(data="mm"), 1)
# A values() or values_list() query across joined models must use outer
# joins appropriately.
# Note: In Oracle, we expect a null CharField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_charfield_repr = ""
else:
expected_null_charfield_repr = None
self.assertSequenceEqual(
Report.objects.values_list("creator__extra__info", flat=True).order_by(
"name"
),
["e1", "e2", expected_null_charfield_repr],
)
# Similarly for select_related(), joins beyond an initial nullable join
# must use outer joins so that all results are included.
self.assertSequenceEqual(
Report.objects.select_related("creator", "creator__extra").order_by("name"),
[self.r1, self.r2, self.r3],
)
# When there are multiple paths to a table from another table, we have
# to be careful not to accidentally reuse an inappropriate join when
# using select_related(). We used to return the parent's Detail record
# here by mistake.
d1 = Detail.objects.create(data="d1")
d2 = Detail.objects.create(data="d2")
m1 = Member.objects.create(name="m1", details=d1)
m2 = Member.objects.create(name="m2", details=d2)
Child.objects.create(person=m2, parent=m1)
obj = m1.children.select_related("person__details")[0]
self.assertEqual(obj.person.details.data, "d2")
def test_order_by_resetting(self):
# Calling order_by() with no parameters removes any existing ordering on the
# model. But it should still be possible to add new ordering after that.
qs = Author.objects.order_by().order_by("name")
self.assertIn("ORDER BY", qs.query.get_compiler(qs.db).as_sql()[0])
def test_order_by_reverse_fk(self):
        # It is possible to order by the reverse of a foreign key, although
        # that can lead to duplicate results.
c1 = SimpleCategory.objects.create(name="category1")
c2 = SimpleCategory.objects.create(name="category2")
CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c1)
self.assertSequenceEqual(
SimpleCategory.objects.order_by("categoryitem", "pk"), [c1, c2, c1]
)
def test_filter_reverse_non_integer_pk(self):
date_obj = DateTimePK.objects.create()
extra_obj = ExtraInfo.objects.create(info="extra", date=date_obj)
self.assertEqual(
DateTimePK.objects.filter(extrainfo=extra_obj).get(),
date_obj,
)
def test_ticket10181(self):
        # Avoid raising an EmptyResultSet if an inner query is provably
        # empty (and hence, not executed).
self.assertSequenceEqual(
Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])), []
)
def test_ticket15316_filter_false(self):
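        # The test_ticket15316_* tests exercise isnull filtering across
        # multi-table inheritance (SimpleCategory -> SpecialCategory) and,
        # below, across one-to-one relations.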
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertCountEqual(qs, [ci2, ci3])
def test_ticket15316_exclude_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_filter_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_exclude_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertCountEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_filter_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(
category__onetoonecategory__isnull=False
).order_by("pk")
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_exclude_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_filter_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_exclude_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(
category__onetoonecategory__isnull=True
).order_by("pk")
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
class Queries5Tests(TestCase):
@classmethod
def setUpTestData(cls):
        # Ordering by 'rank' gives us rank1, rank2, rank3. Ordering by the
        # Meta.ordering gives rank3, rank2, rank1.
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.n2 = Note.objects.create(note="n2", misc="bar", id=2)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
e2 = ExtraInfo.objects.create(info="e2", note=cls.n2)
a1 = Author.objects.create(name="a1", num=1001, extra=e1)
a2 = Author.objects.create(name="a2", num=2002, extra=e1)
a3 = Author.objects.create(name="a3", num=3003, extra=e2)
cls.rank2 = Ranking.objects.create(rank=2, author=a2)
cls.rank1 = Ranking.objects.create(rank=1, author=a3)
cls.rank3 = Ranking.objects.create(rank=3, author=a1)
def test_ordering(self):
# Cross model ordering is possible in Meta, too.
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
self.assertSequenceEqual(
Ranking.objects.order_by("rank"),
[self.rank1, self.rank2, self.rank3],
)
        # Ordering of extra() pieces is possible, too, and you can mix extra
        # fields and model fields in the ordering.
self.assertSequenceEqual(
Ranking.objects.extra(
tables=["django_site"], order_by=["-django_site.id", "rank"]
),
[self.rank1, self.rank2, self.rank3],
)
sql = "case when %s > 2 then 1 else 0 end" % connection.ops.quote_name("rank")
qs = Ranking.objects.extra(select={"good": sql})
self.assertEqual(
[o.good for o in qs.extra(order_by=("-good",))], [True, False, False]
)
self.assertSequenceEqual(
qs.extra(order_by=("-good", "id")),
[self.rank3, self.rank2, self.rank1],
)
# Despite having some extra aliases in the query, we can still omit
# them in a values() query.
dicts = qs.values("id", "rank").order_by("id")
self.assertEqual([d["rank"] for d in dicts], [2, 1, 3])
def test_ticket7256(self):
# An empty values() call includes all aliases, including those from an
# extra()
sql = "case when %s > 2 then 1 else 0 end" % connection.ops.quote_name("rank")
qs = Ranking.objects.extra(select={"good": sql})
dicts = qs.values().order_by("id")
for d in dicts:
del d["id"]
del d["author_id"]
self.assertEqual(
[sorted(d.items()) for d in dicts],
[
[("good", 0), ("rank", 2)],
[("good", 0), ("rank", 1)],
[("good", 1), ("rank", 3)],
],
)
def test_ticket7045(self):
# Extra tables used to crash SQL construction on the second use.
qs = Ranking.objects.extra(tables=["django_site"])
qs.query.get_compiler(qs.db).as_sql()
# test passes if this doesn't raise an exception.
qs.query.get_compiler(qs.db).as_sql()
def test_ticket9848(self):
# Make sure that updates which only filter on sub-tables don't
# inadvertently update the wrong records (bug #9848).
author_start = Author.objects.get(name="a1")
ranking_start = Ranking.objects.get(author__name="a1")
# Make sure that the IDs from different tables don't happen to match.
self.assertSequenceEqual(
Ranking.objects.filter(author__name="a1"),
[self.rank3],
)
self.assertEqual(Ranking.objects.filter(author__name="a1").update(rank=4636), 1)
r = Ranking.objects.get(author__name="a1")
self.assertEqual(r.id, ranking_start.id)
self.assertEqual(r.author.id, author_start.id)
self.assertEqual(r.rank, 4636)
r.rank = 3
r.save()
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
def test_ticket5261(self):
# Test different empty excludes.
self.assertSequenceEqual(
Note.objects.exclude(Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q() | ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() & ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() ^ ~Q()),
[self.n1, self.n2],
)
def test_extra_select_literal_percent_s(self):
        # %%s in extra() select clauses escapes to a literal %s.
self.assertEqual(Note.objects.extra(select={"foo": "'%%s'"})[0].foo, "%s")
self.assertEqual(
Note.objects.extra(select={"foo": "'%%s bar %%s'"})[0].foo, "%s bar %s"
)
self.assertEqual(
Note.objects.extra(select={"foo": "'bar %%s'"})[0].foo, "bar %s"
)
def test_extra_select_alias_sql_injection(self):
crafted_alias = """injected_name" from "queries_note"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Note.objects.extra(select={crafted_alias: "1"})
def test_queryset_reuse(self):
# Using querysets doesn't mutate aliases.
authors = Author.objects.filter(Q(name="a1") | Q(name="nonexistent"))
self.assertEqual(Ranking.objects.filter(author__in=authors).get(), self.rank3)
self.assertEqual(authors.count(), 1)
def test_filter_unsaved_object(self):
# These tests will catch ValueError in Django 5.0 when passing unsaved
# model instances to related filters becomes forbidden.
# msg = "Model instances passed to related filters must be saved."
msg = "Passing unsaved model instances to related filters is deprecated."
company = Company.objects.create(name="Django")
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.filter(employer=Company(name="unsaved"))
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.filter(employer__in=[company, Company(name="unsaved")])
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
StaffUser.objects.filter(staff=Staff(name="unsaved"))
class SelectRelatedTests(TestCase):
def test_tickets_3045_3288(self):
# Once upon a time, select_related() with circular relations would loop
# infinitely if you forgot to specify "depth". Now we set an arbitrary
# default upper bound.
self.assertSequenceEqual(X.objects.all(), [])
self.assertSequenceEqual(X.objects.select_related(), [])
class SubclassFKTests(TestCase):
def test_ticket7778(self):
# Model subclasses could not be deleted if a nullable foreign key
# relates to a model that relates back.
num_celebs = Celebrity.objects.count()
tvc = TvChef.objects.create(name="Huey")
self.assertEqual(Celebrity.objects.count(), num_celebs + 1)
Fan.objects.create(fan_of=tvc)
Fan.objects.create(fan_of=tvc)
tvc.delete()
# The parent object should have been deleted as well.
self.assertEqual(Celebrity.objects.count(), num_celebs)
class CustomPkTests(TestCase):
def test_ticket7371(self):
self.assertQuerySetEqual(Related.objects.order_by("custom"), [])
class NullableRelOrderingTests(TestCase):
def test_ticket10028(self):
# Ordering by model related to nullable relations(!) should use outer
# joins, so that all results are included.
p1 = Plaything.objects.create(name="p1")
self.assertSequenceEqual(Plaything.objects.all(), [p1])
def test_join_already_in_query(self):
# Ordering by model related to nullable relations should not change
# the join type of already existing joins.
Plaything.objects.create(name="p1")
s = SingleObject.objects.create(name="s")
r = RelatedObject.objects.create(single=s, f=1)
p2 = Plaything.objects.create(name="p2", others=r)
qs = Plaything.objects.filter(others__isnull=False).order_by("pk")
self.assertNotIn("JOIN", str(qs.query))
qs = Plaything.objects.filter(others__f__isnull=False).order_by("pk")
self.assertIn("INNER", str(qs.query))
qs = qs.order_by("others__single__name")
        # The ordering by others__single__name will add one new join (to
        # single) and that join must be a LEFT join. The already existing
        # join to related objects must be kept INNER. So, we have both an
        # INNER and a LEFT join in the query.
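        # Illustrative join shape (exact SQL is backend-dependent and the
        # table names assume the default naming):
        #   ... INNER JOIN "queries_relatedobject" ON ...
        #   LEFT OUTER JOIN "queries_singleobject" ON ...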
self.assertEqual(str(qs.query).count("LEFT"), 1)
self.assertEqual(str(qs.query).count("INNER"), 1)
self.assertSequenceEqual(qs, [p2])
class DisjunctiveFilterTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
def test_ticket7872(self):
# Another variation on the disjunctive filtering theme.
# For the purposes of this regression test, it's important that there is no
# Join object related to the LeafA we create.
l1 = LeafA.objects.create(data="first")
self.assertSequenceEqual(LeafA.objects.all(), [l1])
self.assertSequenceEqual(
LeafA.objects.filter(Q(data="first") | Q(join__b__data="second")),
[l1],
)
def test_ticket8283(self):
# Checking that applying filters after a disjunction works correctly.
self.assertSequenceEqual(
(
ExtraInfo.objects.filter(note=self.n1)
| ExtraInfo.objects.filter(info="e2")
).filter(note=self.n1),
[self.e1],
)
self.assertSequenceEqual(
(
ExtraInfo.objects.filter(info="e2")
| ExtraInfo.objects.filter(note=self.n1)
).filter(note=self.n1),
[self.e1],
)
class Queries6Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
cls.t2 = Tag.objects.create(name="t2", parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name="t3", parent=cls.t1)
cls.t4 = Tag.objects.create(name="t4", parent=cls.t3)
cls.t5 = Tag.objects.create(name="t5", parent=cls.t3)
n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.ann1 = Annotation.objects.create(name="a1", tag=cls.t1)
cls.ann1.notes.add(n1)
cls.ann2 = Annotation.objects.create(name="a2", tag=cls.t4)
def test_parallel_iterators(self):
# Parallel iterators work.
qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(next(i1)), "<Tag: t1>")
self.assertEqual(repr(next(i1)), "<Tag: t2>")
self.assertEqual(repr(next(i2)), "<Tag: t1>")
self.assertEqual(repr(next(i2)), "<Tag: t2>")
self.assertEqual(repr(next(i2)), "<Tag: t3>")
self.assertEqual(repr(next(i1)), "<Tag: t3>")
qs = X.objects.all()
self.assertFalse(qs)
self.assertFalse(qs)
def test_nested_queries_sql(self):
        # Nested queries should not evaluate the inner query as part of
        # constructing the SQL (so we should see a nested query here,
        # indicated by two "SELECT" keywords in the compiled SQL).
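        # Illustrative shape of the compiled SQL (details vary by backend):
        #   SELECT ... FROM "queries_annotation" ...
        #   WHERE ... IN (SELECT ... FROM "queries_note" WHERE "note" = %s)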
qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
self.assertEqual(qs.query.get_compiler(qs.db).as_sql()[0].count("SELECT"), 2)
def test_tickets_8921_9188(self):
# Incorrect SQL was being generated for certain types of exclude()
# queries that crossed multi-valued relations (#8921, #9188 and some
# preemptively discovered cases).
self.assertSequenceEqual(
PointerA.objects.filter(connection__pointerb__id=1), []
)
self.assertSequenceEqual(
PointerA.objects.exclude(connection__pointerb__id=1), []
)
self.assertSequenceEqual(
Tag.objects.exclude(children=None),
[self.t1, self.t3],
)
# This example is tricky because the parent could be NULL, so only checking
# parents with annotations omits some results (tag t1, in this case).
self.assertSequenceEqual(
Tag.objects.exclude(parent__annotation__name="a1"),
[self.t1, self.t4, self.t5],
)
        # The annotation->tag link is single-valued and the tag->children
        # link is multi-valued. So we have to split the exclude filter in
        # the middle and then optimize the inner query without losing
        # results.
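        # Conceptually, the exclude() below splits into two steps (an
        # illustrative equivalent, not the literal SQL):
        #   inner = Tag.objects.filter(children__name="t2")
        #   Annotation.objects.exclude(tag__in=inner)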
self.assertSequenceEqual(
Annotation.objects.exclude(tag__children__name="t2"),
[self.ann2],
)
        # Nested queries are possible (although they should be used with
        # care, since they have performance problems on backends like
        # MySQL).
self.assertSequenceEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
[self.ann1],
)
def test_ticket3739(self):
# The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by("name")
self.assertIsNot(q1, q1.all())
def test_ticket_11320(self):
qs = Tag.objects.exclude(category=None).exclude(category__name="foo")
self.assertEqual(str(qs.query).count(" INNER JOIN "), 1)
def test_distinct_ordered_sliced_subquery_aggregation(self):
self.assertEqual(
Tag.objects.distinct().order_by("category__name")[:3].count(), 3
)
def test_multiple_columns_with_the_same_name_slice(self):
self.assertEqual(
list(
Tag.objects.order_by("name").values_list("name", "category__name")[:2]
),
[("t1", "Generic"), ("t2", "Generic")],
)
self.assertSequenceEqual(
Tag.objects.order_by("name").select_related("category")[:2],
[self.t1, self.t2],
)
self.assertEqual(
list(Tag.objects.order_by("-name").values_list("name", "parent__name")[:2]),
[("t5", "t3"), ("t4", "t3")],
)
self.assertSequenceEqual(
Tag.objects.order_by("-name").select_related("parent")[:2],
[self.t5, self.t4],
)
def test_col_alias_quoted(self):
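        # Aggregating over the grouped values() query makes the compiler
        # wrap it in a subquery whose unaliased columns get positional
        # aliases such as "col1" (illustrative of the current compiler
        # behavior); the assertion below checks that the alias is quoted.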
with CaptureQueriesContext(connection) as captured_queries:
self.assertEqual(
Tag.objects.values("parent")
.annotate(
tag_per_parent=Count("pk"),
)
.aggregate(Max("tag_per_parent")),
{"tag_per_parent__max": 2},
)
sql = captured_queries[0]["sql"]
self.assertIn("AS %s" % connection.ops.quote_name("col1"), sql)
def test_xor_subquery(self):
self.assertSequenceEqual(
Tag.objects.filter(
Exists(Tag.objects.filter(id=OuterRef("id"), name="t3"))
^ Exists(Tag.objects.filter(id=OuterRef("id"), parent=self.t1))
),
[self.t2],
)
class RawQueriesTests(TestCase):
@classmethod
def setUpTestData(cls):
Note.objects.create(note="n1", misc="foo", id=1)
def test_ticket14729(self):
        # Test the representation of a raw query with one or more
        # parameters passed as a list.
query = "SELECT * FROM queries_note WHERE note = %s"
params = ["n1"]
qs = Note.objects.raw(query, params=params)
self.assertEqual(
repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>"
)
query = "SELECT * FROM queries_note WHERE note = %s and misc = %s"
params = ["n1", "foo"]
qs = Note.objects.raw(query, params=params)
self.assertEqual(
repr(qs),
"<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>",
)
class GeneratorExpressionTests(SimpleTestCase):
def test_ticket10432(self):
# Using an empty iterator as the rvalue for an "__in"
# lookup is legal.
self.assertCountEqual(Note.objects.filter(pk__in=iter(())), [])
class ComparisonTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
cls.a2 = Author.objects.create(name="a2", num=2002, extra=e1)
def test_ticket8597(self):
# Regression tests for case-insensitive comparisons
item_ab = Item.objects.create(
name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1
)
item_xy = Item.objects.create(
name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="x%Y"),
[item_xy],
)
self.assertSequenceEqual(
Item.objects.filter(name__istartswith="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iendswith="A_b"),
[item_ab],
)
class ExistsSql(TestCase):
def test_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertFalse(Tag.objects.exists())
        # OK - so the exists query worked - but did it include too many
        # columns?
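        # exists() should compile to something like (illustrative):
        #   SELECT 1 AS "a" FROM "queries_tag" LIMIT 1
        # i.e. with a constant select list instead of model columns.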
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"]
id, name = connection.ops.quote_name("id"), connection.ops.quote_name("name")
self.assertNotIn(id, qstr)
self.assertNotIn(name, qstr)
def test_distinct_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertIs(Article.objects.distinct().exists(), False)
self.assertEqual(len(captured_queries), 1)
captured_sql = captured_queries[0]["sql"]
self.assertNotIn(connection.ops.quote_name("id"), captured_sql)
self.assertNotIn(connection.ops.quote_name("name"), captured_sql)
def test_sliced_distinct_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertIs(Article.objects.distinct()[1:3].exists(), False)
self.assertEqual(len(captured_queries), 1)
captured_sql = captured_queries[0]["sql"]
self.assertIn(connection.ops.quote_name("id"), captured_sql)
self.assertIn(connection.ops.quote_name("name"), captured_sql)
def test_ticket_18414(self):
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
self.assertTrue(Article.objects.exists())
self.assertTrue(Article.objects.distinct().exists())
self.assertTrue(Article.objects.distinct()[1:3].exists())
self.assertFalse(Article.objects.distinct()[1:1].exists())
@skipUnlessDBFeature("can_distinct_on_fields")
def test_ticket_18414_distinct_on(self):
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
self.assertTrue(Article.objects.distinct("name").exists())
self.assertTrue(Article.objects.distinct("name")[1:2].exists())
self.assertFalse(Article.objects.distinct("name")[2:3].exists())
class QuerysetOrderedTests(unittest.TestCase):
"""
Tests for the Queryset.ordered attribute.
"""
def test_no_default_or_explicit_ordering(self):
self.assertIs(Annotation.objects.all().ordered, False)
def test_cleared_default_ordering(self):
self.assertIs(Tag.objects.all().ordered, True)
self.assertIs(Tag.objects.order_by().ordered, False)
def test_explicit_ordering(self):
self.assertIs(Annotation.objects.order_by("id").ordered, True)
def test_empty_queryset(self):
self.assertIs(Annotation.objects.none().ordered, True)
def test_order_by_extra(self):
self.assertIs(Annotation.objects.extra(order_by=["id"]).ordered, True)
def test_annotated_ordering(self):
qs = Annotation.objects.annotate(num_notes=Count("notes"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("num_notes").ordered, True)
def test_annotated_default_ordering(self):
qs = Tag.objects.annotate(num_notes=Count("pk"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("name").ordered, True)
def test_annotated_values_default_ordering(self):
qs = Tag.objects.values("name").annotate(num_notes=Count("pk"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("name").ordered, True)
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
class SubqueryTests(TestCase):
@classmethod
def setUpTestData(cls):
NamedCategory.objects.create(id=1, name="first")
NamedCategory.objects.create(id=2, name="second")
NamedCategory.objects.create(id=3, name="third")
NamedCategory.objects.create(id=4, name="fourth")
def test_ordered_subselect(self):
"Subselects honor any manual ordering"
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[2:]
)
self.assertEqual(set(query.values_list("id", flat=True)), {1, 2})
def test_slice_subquery_and_query(self):
"""
Slice a query that has a sliced subquery
"""
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:2]
)[0:2]
self.assertEqual({x.id for x in query}, {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:3]
)[1:3]
self.assertEqual({x.id for x in query}, {3})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[2:]
)[1:]
self.assertEqual({x.id for x in query}, {2})
def test_related_sliced_subquery(self):
"""
Related objects constraints can safely contain sliced subqueries.
refs #22434
"""
generic = NamedCategory.objects.create(id=5, name="Generic")
t1 = Tag.objects.create(name="t1", category=generic)
t2 = Tag.objects.create(name="t2", category=generic)
ManagedModel.objects.create(data="mm1", tag=t1, public=True)
mm2 = ManagedModel.objects.create(data="mm2", tag=t2, public=True)
query = ManagedModel.normal_manager.filter(
tag__in=Tag.objects.order_by("-id")[:1]
)
self.assertEqual({x.id for x in query}, {mm2.id})
def test_sliced_delete(self):
"Delete queries can safely contain sliced subqueries"
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:1]
).delete()
self.assertEqual(
set(DumbCategory.objects.values_list("id", flat=True)), {1, 2, 3}
)
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:2]
).delete()
self.assertEqual(set(DumbCategory.objects.values_list("id", flat=True)), {1, 3})
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:]
).delete()
self.assertEqual(set(DumbCategory.objects.values_list("id", flat=True)), {3})
def test_distinct_ordered_sliced_subquery(self):
# Implicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by("name")[0:2],
)
.order_by("name")
.values_list("name", flat=True),
["first", "fourth"],
)
# Explicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct()
.order_by("-name")
.values("id")[0:2],
)
.order_by("name")
.values_list("name", flat=True),
["second", "third"],
)
# Annotated value.
self.assertSequenceEqual(
DumbCategory.objects.filter(
id__in=DumbCategory.objects.annotate(double_id=F("id") * 2)
.order_by("id")
.distinct()
.values("double_id")[0:2],
)
.order_by("id")
.values_list("id", flat=True),
[2, 4],
)
class QuerySetBitwiseOperationTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.school = School.objects.create()
cls.room_1 = Classroom.objects.create(
school=cls.school, has_blackboard=False, name="Room 1"
)
cls.room_2 = Classroom.objects.create(
school=cls.school, has_blackboard=True, name="Room 2"
)
cls.room_3 = Classroom.objects.create(
school=cls.school, has_blackboard=True, name="Room 3"
)
cls.room_4 = Classroom.objects.create(
school=cls.school, has_blackboard=False, name="Room 4"
)
tag = Tag.objects.create()
cls.annotation_1 = Annotation.objects.create(tag=tag)
annotation_2 = Annotation.objects.create(tag=tag)
note = cls.annotation_1.notes.create(tag=tag)
cls.base_user_1 = BaseUser.objects.create(annotation=cls.annotation_1)
cls.base_user_2 = BaseUser.objects.create(annotation=annotation_2)
cls.task = Task.objects.create(
owner=cls.base_user_2,
creator=cls.base_user_2,
note=note,
)
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by("-pk")[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by("-name")[:1]
self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by("-pk")[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by("-name")[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_3, self.room_4])
def test_subquery_aliases(self):
combined = School.objects.filter(pk__isnull=False) & School.objects.filter(
Exists(
Classroom.objects.filter(
has_blackboard=True,
school=OuterRef("pk"),
)
),
)
self.assertSequenceEqual(combined, [self.school])
nested_combined = School.objects.filter(pk__in=combined.values("pk"))
self.assertSequenceEqual(nested_combined, [self.school])
def test_conflicting_aliases_during_combine(self):
qs1 = self.annotation_1.baseuser_set.all()
qs2 = BaseUser.objects.filter(
Q(owner__note__in=self.annotation_1.notes.all())
| Q(creator__note__in=self.annotation_1.notes.all())
)
self.assertSequenceEqual(qs1, [self.base_user_1])
self.assertSequenceEqual(qs2, [self.base_user_2])
self.assertCountEqual(qs2 | qs1, qs1 | qs2)
self.assertCountEqual(qs2 | qs1, [self.base_user_1, self.base_user_2])
class CloneTests(TestCase):
def test_evaluated_queryset_as_argument(self):
"""
If a queryset is already evaluated, it can still be used as a query arg.
"""
n = Note(note="Test1", misc="misc")
n.save()
e = ExtraInfo(info="good", note=n)
e.save()
n_list = Note.objects.all()
# Evaluate the Note queryset, populating the query cache
list(n_list)
        # Make one of the cached results unpicklable.
n_list._result_cache[0].lock = Lock()
with self.assertRaises(TypeError):
pickle.dumps(n_list)
# Use the note queryset in a query, and evaluate
# that query in a way that involves cloning.
self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, "good")
def test_no_model_options_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta)
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail(
"Model options shouldn't be cloned."
)
try:
Note.objects.filter(pk__lte=F("pk") + 1).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
def test_no_fields_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta.get_field("misc"))
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail(
"Model fields shouldn't be cloned"
)
try:
Note.objects.filter(note=F("misc")).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
class EmptyQuerySetTests(SimpleTestCase):
def test_emptyqueryset_values(self):
        # #14366 -- Calling .values() on an empty QuerySet and then cloning
        # that should not cause an error.
self.assertCountEqual(Number.objects.none().values("num").order_by("num"), [])
def test_values_subquery(self):
self.assertCountEqual(
Number.objects.filter(pk__in=Number.objects.none().values("pk")), []
)
self.assertCountEqual(
Number.objects.filter(pk__in=Number.objects.none().values_list("pk")), []
)
def test_ticket_19151(self):
# #19151 -- Calling .values() or .values_list() on an empty QuerySet
# should return an empty QuerySet and not cause an error.
q = Author.objects.none()
self.assertCountEqual(q.values(), [])
self.assertCountEqual(q.values_list(), [])
class ValuesQuerysetTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=72)
def test_flat_values_list(self):
qs = Number.objects.values_list("num")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_extra_values(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s", "value_minus_x": "num-%s"},
select_params=(1, 2),
)
qs = qs.order_by("value_minus_x")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_twice(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={"value_plus_one": "num+1", "value_minus_one": "num-1"}
)
qs = qs.order_by("value_minus_one").order_by("value_plus_one")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_multiple(self):
        # PostgreSQL doesn't allow constants in ORDER BY, so check for that.
qs = Number.objects.extra(
select={
"value_plus_one": "num+1",
"value_minus_one": "num-1",
"constant_value": "1",
}
)
qs = qs.order_by("value_plus_one", "value_minus_one", "constant_value")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
    def test_extra_values_order_in_extra(self):
        # testing for ticket 14930 issues
        qs = Number.objects.extra(
            select={"value_plus_one": "num+1", "value_minus_one": "num-1"},
            order_by=["value_minus_one"],
        )
        qs = qs.values("num")
        self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_select_params_values_order_in_extra(self):
        # testing for ticket 23259 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s"},
select_params=[1],
order_by=["value_plus_x"],
)
qs = qs.filter(num=72)
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_multiple_select_params_values_order_by(self):
        # testing for ticket 23259 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s", "value_minus_x": "num-%s"},
select_params=(72, 72),
)
qs = qs.order_by("value_minus_x")
qs = qs.filter(num=1)
qs = qs.values("num")
self.assertSequenceEqual(qs, [])
def test_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={"value_plus_one": "num+1"})
qs = qs.order_by("value_plus_one")
qs = qs.values_list("num")
self.assertSequenceEqual(qs, [(72,)])
def test_flat_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={"value_plus_one": "num+1"})
qs = qs.order_by("value_plus_one")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_field_error_values_list(self):
# see #23443
msg = (
"Cannot resolve keyword %r into field. Join on 'name' not permitted."
% "foo"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.values_list("name__foo")
def test_named_values_list_flat(self):
msg = "'flat' and 'named' can't be used together."
with self.assertRaisesMessage(TypeError, msg):
Number.objects.values_list("num", flat=True, named=True)
def test_named_values_list_bad_field_name(self):
msg = "Type names and field names must be valid identifiers: '1'"
with self.assertRaisesMessage(ValueError, msg):
Number.objects.extra(select={"1": "num+1"}).values_list(
"1", named=True
).first()
def test_named_values_list_with_fields(self):
qs = Number.objects.extra(select={"num2": "num+1"}).annotate(Count("id"))
values = qs.values_list("num", "num2", named=True).first()
self.assertEqual(type(values).__name__, "Row")
self.assertEqual(values._fields, ("num", "num2"))
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
def test_named_values_list_without_fields(self):
qs = Number.objects.extra(select={"num2": "num+1"}).annotate(Count("id"))
values = qs.values_list(named=True).first()
self.assertEqual(type(values).__name__, "Row")
self.assertEqual(
values._fields,
("num2", "id", "num", "other_num", "another_num", "id__count"),
)
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
self.assertEqual(values.id__count, 1)
def test_named_values_list_expression_with_default_alias(self):
expr = Count("id")
values = (
Number.objects.annotate(id__count1=expr)
.values_list(expr, "id__count1", named=True)
.first()
)
self.assertEqual(values._fields, ("id__count2", "id__count1"))
def test_named_values_list_expression(self):
expr = F("num") + 1
qs = Number.objects.annotate(combinedexpression1=expr).values_list(
expr, "combinedexpression1", named=True
)
values = qs.first()
self.assertEqual(values._fields, ("combinedexpression2", "combinedexpression1"))
def test_named_values_pickle(self):
value = Number.objects.values_list("num", "other_num", named=True).get()
self.assertEqual(value, (72, None))
self.assertEqual(pickle.loads(pickle.dumps(value)), value)
class QuerySetSupportsPythonIdioms(TestCase):
@classmethod
def setUpTestData(cls):
some_date = datetime.datetime(2014, 5, 16, 12, 1)
cls.articles = [
Article.objects.create(name=f"Article {i}", created=some_date)
for i in range(1, 8)
]
def get_ordered_articles(self):
return Article.objects.order_by("name")
def test_can_get_items_using_index_and_slice_notation(self):
self.assertEqual(self.get_ordered_articles()[0].name, "Article 1")
self.assertSequenceEqual(
self.get_ordered_articles()[1:3],
[self.articles[1], self.articles[2]],
)
def test_slicing_with_steps_can_be_used(self):
self.assertSequenceEqual(
self.get_ordered_articles()[::2],
[
self.articles[0],
self.articles[2],
self.articles[4],
self.articles[6],
],
)
def test_slicing_without_step_is_lazy(self):
with self.assertNumQueries(0):
self.get_ordered_articles()[0:5]
    def test_slicing_with_steps_is_not_lazy(self):
with self.assertNumQueries(1):
self.get_ordered_articles()[0:5:3]
def test_slicing_can_slice_again_after_slicing(self):
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][0:2],
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][4:], [self.articles[4]]
)
self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], [])
# Some more tests!
self.assertSequenceEqual(
self.get_ordered_articles()[2:][0:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][2:3], [self.articles[4]]
)
# Using an offset without a limit is also possible.
self.assertSequenceEqual(
self.get_ordered_articles()[5:],
[self.articles[5], self.articles[6]],
)
def test_slicing_cannot_filter_queryset_once_sliced(self):
msg = "Cannot filter a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].filter(id=1)
def test_slicing_cannot_reorder_queryset_once_sliced(self):
msg = "Cannot reorder a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].order_by("id")
def test_slicing_cannot_combine_queries_once_sliced(self):
msg = "Cannot combine queries once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:1] & Article.objects.all()[4:5]
def test_slicing_negative_indexing_not_supported_for_single_element(self):
"""hint: inverting your ordering might do what you need"""
msg = "Negative indexing is not supported."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1]
def test_slicing_negative_indexing_not_supported_for_range(self):
"""hint: inverting your ordering might do what you need"""
msg = "Negative indexing is not supported."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[0:-5]
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1:]
def test_invalid_index(self):
msg = "QuerySet indices must be integers or slices, not str."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()["foo"]
def test_can_get_number_of_items_in_queryset_using_standard_len(self):
self.assertEqual(len(Article.objects.filter(name__exact="Article 1")), 1)
def test_can_combine_queries_using_and_and_or_operators(self):
s1 = Article.objects.filter(name__exact="Article 1")
s2 = Article.objects.filter(name__exact="Article 2")
self.assertSequenceEqual(
(s1 | s2).order_by("name"),
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(s1 & s2, [])
class WeirdQuerysetSlicingTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=1)
Number.objects.create(num=2)
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
Article.objects.create(name="three", created=datetime.datetime.now())
Article.objects.create(name="four", created=datetime.datetime.now())
food = Food.objects.create(name="spam")
Eaten.objects.create(meal="spam with eggs", food=food)
def test_tickets_7698_10202(self):
# People like to slice with '0' as the high-water mark.
self.assertSequenceEqual(Article.objects.all()[0:0], [])
self.assertSequenceEqual(Article.objects.all()[0:0][:10], [])
self.assertEqual(Article.objects.all()[:0].count(), 0)
msg = "Cannot change a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[:0].latest("created")
def test_empty_resultset_sql(self):
# ticket #12192
self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1]))
def test_empty_sliced_subquery(self):
self.assertEqual(
Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0
)
def test_empty_sliced_subquery_exclude(self):
self.assertEqual(
Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1
)
def test_zero_length_values_slicing(self):
n = 42
with self.assertNumQueries(0):
self.assertQuerySetEqual(Article.objects.values()[n:n], [])
self.assertQuerySetEqual(Article.objects.values_list()[n:n], [])
class EscapingTests(TestCase):
def test_ticket_7302(self):
# Reserved names are appropriately escaped
r_a = ReservedName.objects.create(name="a", order=42)
r_b = ReservedName.objects.create(name="b", order=37)
self.assertSequenceEqual(
ReservedName.objects.order_by("order"),
[r_b, r_a],
)
self.assertSequenceEqual(
ReservedName.objects.extra(
select={"stuff": "name"}, order_by=("order", "stuff")
),
[r_b, r_a],
)
class ToFieldTests(TestCase):
def test_in_query(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food__in=[apple, pear])),
{lunch, dinner},
)
def test_in_subquery(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name="apple"))),
{lunch},
)
self.assertEqual(
set(
Eaten.objects.filter(
food__in=Food.objects.filter(name="apple").values("eaten__meal")
)
),
set(),
)
self.assertEqual(
set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal="lunch"))),
{apple},
)
def test_nested_in_subquery(self):
extra = ExtraInfo.objects.create()
author = Author.objects.create(num=42, extra=extra)
report = Report.objects.create(creator=author)
comment = ReportComment.objects.create(report=report)
comments = ReportComment.objects.filter(
report__in=Report.objects.filter(
creator__in=extra.author_set.all(),
),
)
self.assertSequenceEqual(comments, [comment])
def test_reverse_in(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch_apple = Eaten.objects.create(food=apple, meal="lunch")
lunch_pear = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])), {apple, pear}
)
def test_single_object(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=apple, meal="dinner")
self.assertEqual(set(Eaten.objects.filter(food=apple)), {lunch, dinner})
def test_single_object_reverse(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(set(Food.objects.filter(eaten=lunch)), {apple})
def test_recursive_fk(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(list(Node.objects.filter(parent=node1)), [node2])
def test_recursive_fk_reverse(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(list(Node.objects.filter(node=node2)), [node1])
class IsNullTests(TestCase):
def test_primary_key(self):
custom = CustomPk.objects.create(name="pk")
null = Related.objects.create()
notnull = Related.objects.create(custom=custom)
self.assertSequenceEqual(
Related.objects.filter(custom__isnull=False), [notnull]
)
self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null])
def test_to_field(self):
apple = Food.objects.create(name="apple")
e1 = Eaten.objects.create(food=apple, meal="lunch")
e2 = Eaten.objects.create(meal="lunch")
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=False),
[e1],
)
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=True),
[e2],
)
class ConditionalTests(TestCase):
"""Tests whose execution depend on different environment conditions like
Python version or DB backend features"""
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name="t1", category=generic)
Tag.objects.create(name="t2", parent=t1, category=generic)
t3 = Tag.objects.create(name="t3", parent=t1)
Tag.objects.create(name="t4", parent=t3)
Tag.objects.create(name="t5", parent=t3)
def test_infinite_loop(self):
# If you're not careful, it's possible to introduce infinite loops via
# default ordering on foreign keys in a cycle. We detect that.
with self.assertRaisesMessage(FieldError, "Infinite loop caused by ordering."):
list(LoopX.objects.all()) # Force queryset evaluation with list()
with self.assertRaisesMessage(FieldError, "Infinite loop caused by ordering."):
list(LoopZ.objects.all()) # Force queryset evaluation with list()
        # Note that this doesn't cause an infinite loop: Tag has a default
        # ordering, but it doesn't follow the recursive foreign key, so
        # ordering by "parent" terminates.
self.assertEqual(len(Tag.objects.order_by("parent")), 5)
# ... but you can still order in a non-recursive fashion among linked
# fields (the previous test failed because the default ordering was
# recursive).
self.assertSequenceEqual(LoopX.objects.order_by("y__x__y__x__id"), [])
# When grouping without specifying ordering, we add an explicit "ORDER BY NULL"
# portion in MySQL to prevent unnecessary sorting.
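    # e.g. (MySQL, illustrative):
    #   SELECT parent_id FROM queries_tag GROUP BY parent_id ORDER BY NULL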
@skipUnlessDBFeature("requires_explicit_null_ordering_when_grouping")
def test_null_ordering_added(self):
query = Tag.objects.values_list("parent_id", flat=True).order_by().query
query.group_by = ["parent_id"]
sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0]
fragment = "ORDER BY "
pos = sql.find(fragment)
self.assertEqual(sql.find(fragment, pos + 1), -1)
self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment))
def test_in_list_limit(self):
# The "in" lookup works with lists of 1000 items or more.
# The numbers amount is picked to force three different IN batches
# for Oracle, yet to be less than 2100 parameter limit for MSSQL.
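        # Oracle limits IN lists to 1000 expressions, so Django splits the
        # 2050 parameters into batches of 1000 + 1000 + 50.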
numbers = list(range(2050))
max_query_params = connection.features.max_query_params
if max_query_params is None or max_query_params >= len(numbers):
Number.objects.bulk_create(Number(num=num) for num in numbers)
for number in [1000, 1001, 2000, len(numbers)]:
with self.subTest(number=number):
self.assertEqual(
Number.objects.filter(num__in=numbers[:number]).count(), number
)
class UnionTests(TestCase):
"""
Tests for the union of two querysets. Bug #12252.
"""
@classmethod
def setUpTestData(cls):
objectas = []
objectbs = []
objectcs = []
a_info = ["one", "two", "three"]
for name in a_info:
o = ObjectA(name=name)
o.save()
objectas.append(o)
b_info = [
("un", 1, objectas[0]),
("deux", 2, objectas[0]),
("trois", 3, objectas[2]),
]
for name, number, objecta in b_info:
o = ObjectB(name=name, num=number, objecta=objecta)
o.save()
objectbs.append(o)
c_info = [("ein", objectas[2], objectbs[2]), ("zwei", objectas[1], objectbs[1])]
for name, objecta, objectb in c_info:
o = ObjectC(name=name, objecta=objecta, objectb=objectb)
o.save()
objectcs.append(o)
def check_union(self, model, Q1, Q2):
filter = model.objects.filter
self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2)))
self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2)))
def test_A_AB(self):
Q1 = Q(name="two")
Q2 = Q(objectb__name="deux")
self.check_union(ObjectA, Q1, Q2)
def test_A_AB2(self):
Q1 = Q(name="two")
Q2 = Q(objectb__name="deux", objectb__num=2)
self.check_union(ObjectA, Q1, Q2)
def test_AB_ACB(self):
Q1 = Q(objectb__name="deux")
Q2 = Q(objectc__objectb__name="deux")
self.check_union(ObjectA, Q1, Q2)
def test_BAB_BAC(self):
Q1 = Q(objecta__objectb__name="deux")
Q2 = Q(objecta__objectc__name="ein")
self.check_union(ObjectB, Q1, Q2)
def test_BAB_BACB(self):
Q1 = Q(objecta__objectb__name="deux")
Q2 = Q(objecta__objectc__objectb__name="trois")
self.check_union(ObjectB, Q1, Q2)
def test_BA_BCA__BAB_BAC_BCA(self):
Q1 = Q(objecta__name="one", objectc__objecta__name="two")
Q2 = Q(
objecta__objectc__name="ein",
objectc__objecta__name="three",
objecta__objectb__name="trois",
)
self.check_union(ObjectB, Q1, Q2)
class DefaultValuesInsertTest(TestCase):
def test_no_extra_params(self):
"""
        Can create an instance of a model with only the PK field (#17056).
"""
DumbCategory.objects.create()
class ExcludeTests(TestCase):
@classmethod
def setUpTestData(cls):
f1 = Food.objects.create(name="apples")
cls.f2 = Food.objects.create(name="oranges")
Eaten.objects.create(food=f1, meal="dinner")
cls.j1 = Job.objects.create(name="Manager")
cls.r1 = Responsibility.objects.create(description="Playing golf")
cls.j2 = Job.objects.create(name="Programmer")
cls.r2 = Responsibility.objects.create(description="Programming")
JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1)
JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2)
def test_to_field(self):
self.assertSequenceEqual(
Food.objects.exclude(eaten__meal="dinner"),
[self.f2],
)
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description="Playing golf"),
[self.j2],
)
self.assertSequenceEqual(
Responsibility.objects.exclude(jobs__name="Manager"),
[self.r2],
)
def test_ticket14511(self):
alex = Person.objects.get_or_create(name="Alex")[0]
jane = Person.objects.get_or_create(name="Jane")[0]
oracle = Company.objects.get_or_create(name="Oracle")[0]
google = Company.objects.get_or_create(name="Google")[0]
microsoft = Company.objects.get_or_create(name="Microsoft")[0]
intel = Company.objects.get_or_create(name="Intel")[0]
def employ(employer, employee, title):
Employment.objects.get_or_create(
employee=employee, employer=employer, title=title
)
employ(oracle, alex, "Engineer")
employ(oracle, alex, "Developer")
employ(google, alex, "Engineer")
employ(google, alex, "Manager")
employ(microsoft, alex, "Manager")
employ(intel, alex, "Manager")
employ(microsoft, jane, "Developer")
employ(intel, jane, "Manager")
alex_tech_employers = (
alex.employers.filter(employment__title__in=("Engineer", "Developer"))
.distinct()
.order_by("name")
)
self.assertSequenceEqual(alex_tech_employers, [google, oracle])
alex_nontech_employers = (
alex.employers.exclude(employment__title__in=("Engineer", "Developer"))
.distinct()
.order_by("name")
)
self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft])
def test_exclude_reverse_fk_field_ref(self):
tag = Tag.objects.create()
Note.objects.create(tag=tag, note="note")
annotation = Annotation.objects.create(name="annotation", tag=tag)
self.assertEqual(
Annotation.objects.exclude(tag__note__note=F("name")).get(), annotation
)
def test_exclude_with_circular_fk_relation(self):
self.assertEqual(
ObjectB.objects.exclude(objecta__objectb__name=F("name")).count(), 0
)
def test_subquery_exclude_outerref(self):
qs = JobResponsibilities.objects.filter(
Exists(Responsibility.objects.exclude(jobs=OuterRef("job"))),
)
self.assertTrue(qs.exists())
self.r1.delete()
self.assertFalse(qs.exists())
def test_exclude_nullable_fields(self):
number = Number.objects.create(num=1, other_num=1)
Number.objects.create(num=2, other_num=2, another_num=2)
self.assertSequenceEqual(
Number.objects.exclude(other_num=F("another_num")),
[number],
)
self.assertSequenceEqual(
Number.objects.exclude(num=F("another_num")),
[number],
)
def test_exclude_multivalued_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description="Programming"),
[self.j1],
)
self.assertIn("exists", captured_queries[0]["sql"].lower())
def test_exclude_subquery(self):
subquery = JobResponsibilities.objects.filter(
responsibility__description="bar",
) | JobResponsibilities.objects.exclude(
job__responsibilities__description="foo",
)
self.assertCountEqual(
Job.objects.annotate(
responsibility=subquery.filter(job=OuterRef("name")).values("id")[:1]
),
[self.j1, self.j2],
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_exclude_unsaved_o2o_object(self):
jack = Staff.objects.create(name="jack")
jack_staff = StaffUser.objects.create(staff=jack)
unsaved_object = Staff(name="jane")
self.assertIsNone(unsaved_object.pk)
self.assertSequenceEqual(
StaffUser.objects.exclude(staff=unsaved_object), [jack_staff]
)
def test_exclude_unsaved_object(self):
# These tests will catch ValueError in Django 5.0 when passing unsaved
# model instances to related filters becomes forbidden.
# msg = "Model instances passed to related filters must be saved."
company = Company.objects.create(name="Django")
msg = "Passing unsaved model instances to related filters is deprecated."
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.exclude(employer=Company(name="unsaved"))
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.exclude(employer__in=[company, Company(name="unsaved")])
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
StaffUser.objects.exclude(staff=Staff(name="unsaved"))
class ExcludeTest17600(TestCase):
"""
    Some regression tests for ticket #17600. Some of these likely duplicate
    other existing tests.
"""
@classmethod
def setUpTestData(cls):
# Create a few Orders.
cls.o1 = Order.objects.create(pk=1)
cls.o2 = Order.objects.create(pk=2)
cls.o3 = Order.objects.create(pk=3)
# Create some OrderItems for the first order with homogeneous
# status_id values
cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)
# Create some OrderItems for the second order with heterogeneous
# status_id values
cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)
        # Create some OrderItems for the third order with heterogeneous
        # status_id values
cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)
def test_exclude_plain(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1),
[self.o3],
)
def test_exclude_plain_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1).distinct(),
[self.o3],
)
def test_exclude_with_q_object_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)).distinct(),
[self.o3],
)
def test_exclude_with_q_object_no_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)),
[self.o3],
)
def test_exclude_with_q_is_equal_to_plain_exclude(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1).distinct()),
list(Order.objects.exclude(Q(items__status=1)).distinct()),
)
def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1)),
list(Order.objects.exclude(Q(items__status=1)).distinct()),
)
@unittest.expectedFailure
def test_only_orders_with_all_items_having_status_1(self):
"""
This should only return orders having ALL items set to status 1, or
those items not having any orders at all. The correct way to write
this query in SQL seems to be using two nested subqueries.
"""
self.assertSequenceEqual(
Order.objects.exclude(~Q(items__status=1)).distinct(),
[self.o1],
)
class Exclude15786(TestCase):
"""Regression test for #15786"""
def test_ticket15786(self):
c1 = SimpleCategory.objects.create(name="c1")
c2 = SimpleCategory.objects.create(name="c2")
OneToOneCategory.objects.create(category=c1)
OneToOneCategory.objects.create(category=c2)
rel = CategoryRelationship.objects.create(first=c1, second=c2)
self.assertEqual(
CategoryRelationship.objects.exclude(
first__onetoonecategory=F("second__onetoonecategory")
).get(),
rel,
)
class NullInExcludeTest(TestCase):
@classmethod
def setUpTestData(cls):
NullableName.objects.create(name="i1")
NullableName.objects.create()
def test_null_in_exclude_qs(self):
none_val = "" if connection.features.interprets_empty_strings_as_nulls else None
self.assertQuerySetEqual(
NullableName.objects.exclude(name__in=[]),
["i1", none_val],
attrgetter("name"),
)
self.assertQuerySetEqual(
NullableName.objects.exclude(name__in=["i1"]),
[none_val],
attrgetter("name"),
)
self.assertQuerySetEqual(
NullableName.objects.exclude(name__in=["i3"]),
["i1", none_val],
attrgetter("name"),
)
inner_qs = NullableName.objects.filter(name="i1").values_list("name")
self.assertQuerySetEqual(
NullableName.objects.exclude(name__in=inner_qs),
[none_val],
attrgetter("name"),
)
        # The inner queryset wasn't executed - it should have been turned
        # into a subquery above.
self.assertIs(inner_qs._result_cache, None)
@unittest.expectedFailure
def test_col_not_in_list_containing_null(self):
"""
The following case is not handled properly because
SQL's COL NOT IN (list containing null) handling is too weird to
abstract away.
"""
self.assertQuerySetEqual(
NullableName.objects.exclude(name__in=[None]), ["i1"], attrgetter("name")
)
def test_double_exclude(self):
self.assertEqual(
list(NullableName.objects.filter(~~Q(name="i1"))),
list(NullableName.objects.filter(Q(name="i1"))),
)
self.assertNotIn(
"IS NOT NULL", str(NullableName.objects.filter(~~Q(name="i1")).query)
)
class EmptyStringsAsNullTest(TestCase):
"""
Filtering on non-null character fields works as expected.
The reason for these tests is that Oracle treats '' as NULL, and this
can cause problems in query construction. Refs #17957.
"""
@classmethod
def setUpTestData(cls):
cls.nc = NamedCategory.objects.create(name="")
def test_direct_exclude(self):
self.assertQuerySetEqual(
NamedCategory.objects.exclude(name__in=["nonexistent"]),
[self.nc.pk],
attrgetter("pk"),
)
def test_joined_exclude(self):
self.assertQuerySetEqual(
DumbCategory.objects.exclude(namedcategory__name__in=["nonexistent"]),
[self.nc.pk],
attrgetter("pk"),
)
def test_21001(self):
foo = NamedCategory.objects.create(name="foo")
self.assertQuerySetEqual(
NamedCategory.objects.exclude(name=""), [foo.pk], attrgetter("pk")
)
class ProxyQueryCleanupTest(TestCase):
def test_evaluated_proxy_count(self):
"""
Generating the query string doesn't alter the query's state
in irreversible ways. Refs #18248.
"""
ProxyCategory.objects.create()
qs = ProxyCategory.objects.all()
self.assertEqual(qs.count(), 1)
str(qs.query)
self.assertEqual(qs.count(), 1)
class WhereNodeTest(SimpleTestCase):
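    # DummyNode stands in for an arbitrary WHERE-clause child that always
    # compiles to the SQL fragment "dummy"; MockCompiler supplies just
    # enough of the SQLCompiler interface (compile() and quote_name-style
    # calling) for WhereNode.as_sql() to run without a real query.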
class DummyNode:
def as_sql(self, compiler, connection):
return "dummy", []
class MockCompiler:
def compile(self, node):
return node.as_sql(self, connection)
def __call__(self, name):
return connection.ops.quote_name(name)
def test_empty_full_handling_conjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
with self.assertRaises(FullResultSet):
w.as_sql(compiler, connection)
w = WhereNode(children=[self.DummyNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ("(dummy AND dummy)", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy AND dummy)", []))
w = WhereNode(children=[NothingNode(), self.DummyNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
with self.assertRaises(FullResultSet):
w.as_sql(compiler, connection)
def test_empty_full_handling_disjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()], connector=OR)
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
with self.assertRaises(FullResultSet):
w.as_sql(compiler, connection)
w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector=OR)
self.assertEqual(w.as_sql(compiler, connection), ("(dummy OR dummy)", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy OR dummy)", []))
w = WhereNode(children=[NothingNode(), self.DummyNode()], connector=OR)
self.assertEqual(w.as_sql(compiler, connection), ("dummy", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy)", []))
def test_empty_nodes(self):
compiler = WhereNodeTest.MockCompiler()
empty_w = WhereNode()
w = WhereNode(children=[empty_w, empty_w])
with self.assertRaises(FullResultSet):
w.as_sql(compiler, connection)
w.negate()
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.connector = OR
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
with self.assertRaises(FullResultSet):
w.as_sql(compiler, connection)
w = WhereNode(children=[empty_w, NothingNode()], connector=OR)
with self.assertRaises(FullResultSet):
w.as_sql(compiler, connection)
w = WhereNode(children=[empty_w, NothingNode()], connector=AND)
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
class QuerySetExceptionTests(SimpleTestCase):
def test_invalid_order_by(self):
msg = "Cannot resolve keyword '*' into field. Choices are: created, id, name"
with self.assertRaisesMessage(FieldError, msg):
Article.objects.order_by("*")
def test_invalid_order_by_raw_column_alias(self):
msg = (
"Cannot resolve keyword 'queries_author.name' into field. Choices "
"are: cover, created, creator, creator_id, id, modified, name, "
"note, note_id, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Item.objects.values("creator__name").order_by("queries_author.name")
def test_invalid_queryset_model(self):
msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".'
with self.assertRaisesMessage(ValueError, msg):
list(Author.objects.filter(extra=Article.objects.all()))
class NullJoinPromotionOrTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.d1 = ModelD.objects.create(name="foo")
d2 = ModelD.objects.create(name="bar")
cls.a1 = ModelA.objects.create(name="a1", d=cls.d1)
c = ModelC.objects.create(name="c")
b = ModelB.objects.create(name="b", c=c)
cls.a2 = ModelA.objects.create(name="a2", b=b, d=d2)
def test_ticket_17886(self):
        # The first Q-object generates the match; the rest of the filters
        # should not remove the match even if they do not match anything.
        # The problem here was that b__name generates a LEFT OUTER JOIN,
        # then b__c__name generates a join to c, which the ORM tried to
        # promote but failed as that join isn't nullable.
q_obj = Q(d__name="foo") | Q(b__name="foo") | Q(b__c__name="foo")
qset = ModelA.objects.filter(q_obj)
self.assertEqual(list(qset), [self.a1])
# We generate one INNER JOIN to D. The join is direct and not nullable
# so we can use INNER JOIN for it. However, we can NOT use INNER JOIN
# for the b->c join, as a->b is nullable.
self.assertEqual(str(qset.query).count("INNER JOIN"), 1)
def test_isnull_filter_promotion(self):
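        # An isnull=True filter on a joined column needs a LEFT OUTER JOIN:
        # an INNER JOIN would drop exactly the rows where b is NULL.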
qs = ModelA.objects.filter(Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
def test_null_join_demotion(self):
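        # An ANDed isnull=False filter guarantees a matching related row, so
        # the join can be (or stay) INNER. In the ORed case NULL rows may
        # still match, which requires LEFT OUTER.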
qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True))
self.assertIn(" INNER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False))
self.assertIn(" INNER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_ticket_21366(self):
n = Note.objects.create(note="n", misc="m")
e = ExtraInfo.objects.create(info="info", note=n)
a = Author.objects.create(name="Author1", num=1, extra=e)
Ranking.objects.create(rank=1, author=a)
r1 = Report.objects.create(name="Foo", creator=a)
r2 = Report.objects.create(name="Bar")
Report.objects.create(name="Bar", creator=a)
qs = Report.objects.filter(
Q(creator__ranking__isnull=True) | Q(creator__ranking__rank=1, name="Foo")
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count(" JOIN "), 2)
self.assertSequenceEqual(qs.order_by("name"), [r2, r1])
def test_ticket_21748(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
i3 = Identifier.objects.create(name="i3")
Program.objects.create(identifier=i1)
Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
self.assertSequenceEqual(
Identifier.objects.filter(program=None, channel=None), [i3]
)
self.assertSequenceEqual(
Identifier.objects.exclude(program=None, channel=None).order_by("name"),
[i1, i2],
)
def test_ticket_21748_double_negated_and(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
        # Check that ~~Q() (or equivalently .exclude(~Q())) works like Q()
        # for join promotion.
qs1_doubleneg = Identifier.objects.exclude(
~Q(program__id=p1.id, channel__id=c1.id)
).order_by("pk")
qs1_filter = Identifier.objects.filter(
program__id=p1.id, channel__id=c1.id
).order_by("pk")
self.assertQuerySetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(
str(qs1_filter.query).count("JOIN"), str(qs1_doubleneg.query).count("JOIN")
)
self.assertEqual(2, str(qs1_doubleneg.query).count("INNER JOIN"))
self.assertEqual(
str(qs1_filter.query).count("INNER JOIN"),
str(qs1_doubleneg.query).count("INNER JOIN"),
)
def test_ticket_21748_double_negated_or(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Test OR + double negation. The expected result is that channel is
        # LEFT OUTER joined and program is INNER joined.
qs1_filter = Identifier.objects.filter(
Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
).order_by("pk")
qs1_doubleneg = Identifier.objects.exclude(
~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
).order_by("pk")
self.assertQuerySetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(
str(qs1_filter.query).count("JOIN"), str(qs1_doubleneg.query).count("JOIN")
)
self.assertEqual(1, str(qs1_doubleneg.query).count("INNER JOIN"))
self.assertEqual(
str(qs1_filter.query).count("INNER JOIN"),
str(qs1_doubleneg.query).count("INNER JOIN"),
)
def test_ticket_21748_complex_filter(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Finally, a more complex case: one query where each NOT is pushed
        # down to the lowest level of the boolean tree, and another query
        # where this isn't done.
qs1 = Identifier.objects.filter(
~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
).order_by("pk")
qs2 = Identifier.objects.filter(
Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
).order_by("pk")
self.assertQuerySetEqual(qs1, qs2, lambda x: x)
self.assertEqual(str(qs1.query).count("JOIN"), str(qs2.query).count("JOIN"))
self.assertEqual(0, str(qs1.query).count("INNER JOIN"))
self.assertEqual(
str(qs1.query).count("INNER JOIN"), str(qs2.query).count("INNER JOIN")
)
class ReverseJoinTrimmingTest(TestCase):
def test_reverse_trimming(self):
        # Reverse joins aren't trimmed accidentally: we can't know whether
        # there is anything on the other side of the join, so reverse joins
        # can never be trimmed.
t = Tag.objects.create()
qs = Tag.objects.filter(annotation__tag=t.pk)
self.assertIn("INNER JOIN", str(qs.query))
self.assertEqual(list(qs), [])
class JoinReuseTest(TestCase):
"""
The queries reuse joins sensibly (for example, direct joins
are always reused).
"""
def test_fk_reuse(self):
qs = Annotation.objects.filter(tag__name="foo").filter(tag__name="bar")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_select_related(self):
qs = Annotation.objects.filter(tag__name="foo").select_related("tag")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_annotation(self):
qs = Annotation.objects.filter(tag__name="foo").annotate(cnt=Count("tag__name"))
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_disjunction(self):
qs = Annotation.objects.filter(Q(tag__name="foo") | Q(tag__name="bar"))
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_order_by(self):
qs = Annotation.objects.filter(tag__name="foo").order_by("tag__name")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_revo2o_reuse(self):
qs = Detail.objects.filter(member__name="foo").filter(member__name="foo")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_revfk_noreuse(self):
qs = Author.objects.filter(report__name="r4").filter(report__name="r1")
self.assertEqual(str(qs.query).count("JOIN"), 2)
def test_inverted_q_across_relations(self):
"""
When a trimmable join is specified in the query (here school__), the
ORM detects it and removes unnecessary joins. The set of reusable joins
are updated after trimming the query so that other lookups don't
consider that the outer query's filters are in effect for the subquery
(#26551).
"""
springfield_elementary = School.objects.create()
hogward = School.objects.create()
Student.objects.create(school=springfield_elementary)
hp = Student.objects.create(school=hogward)
Classroom.objects.create(school=hogward, name="Potion")
Classroom.objects.create(school=springfield_elementary, name="Main")
qs = Student.objects.filter(
~(
Q(school__classroom__name="Main")
& Q(school__classroom__has_blackboard=None)
)
)
self.assertSequenceEqual(qs, [hp])
class DisjunctionPromotionTests(TestCase):
def test_disjunction_promotion_select_related(self):
fk1 = FK1.objects.create(f1="f1", f2="f2")
basea = BaseA.objects.create(a=fk1)
qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
self.assertEqual(str(qs.query).count(" JOIN "), 0)
qs = qs.select_related("a", "b")
self.assertEqual(str(qs.query).count(" INNER JOIN "), 0)
self.assertEqual(str(qs.query).count(" LEFT OUTER JOIN "), 2)
with self.assertNumQueries(1):
self.assertSequenceEqual(qs, [basea])
self.assertEqual(qs[0].a, fk1)
self.assertIs(qs[0].b, None)
def test_disjunction_promotion1(self):
# Pre-existing join, add two ORed filters to the same join,
# all joins can be INNER JOINS.
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(Q(b__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
# Reverse the order of AND and OR filters.
qs = BaseA.objects.filter(Q(b__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
def test_disjunction_promotion2(self):
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
        # Now there are two different joins in an ORed condition; these must
        # be OUTER joins. The pre-existing join should remain INNER.
qs = qs.filter(Q(b__f1="foo") | Q(c__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
# Reverse case.
qs = BaseA.objects.filter(Q(b__f1="foo") | Q(c__f2="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
def test_disjunction_promotion3(self):
qs = BaseA.objects.filter(a__f2="bar")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
        # The ANDed a__f2 filter allows us to keep using INNER JOIN even
        # inside the ORed case. If the join to a__ returns nothing, the ANDed
        # filter for a__f2 can't be true.
qs = qs.filter(Q(a__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion3_demote(self):
# This one needs demotion logic: the first filter causes a to be
# outer joined, the second filter makes it inner join again.
qs = BaseA.objects.filter(Q(a__f1="foo") | Q(b__f2="foo")).filter(a__f2="bar")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion4_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
# Demote needed for the "a" join. It is marked as outer join by
# above filter (even if it is trimmed away).
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion4(self):
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion5_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        # Note that the filters on "a" above force the join to be an inner
        # join even though it is trimmed.
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = qs.filter(Q(a__f1="foo") | Q(b__f1="foo"))
# So, now the a__f1 join doesn't need promotion.
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
# But b__f1 does.
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
qs = BaseA.objects.filter(Q(a__f1="foo") | Q(b__f1="foo"))
# Now the join to a is created as LOUTER
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion6(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") & Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") & Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
def test_disjunction_promotion7(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") | (Q(b__f1="foo") & Q(a__f1="bar")))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
qs = BaseA.objects.filter(
(Q(a__f1="foo") | Q(b__f1="foo")) & (Q(a__f1="bar") | Q(c__f1="foo"))
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
self.assertEqual(str(qs.query).count("INNER JOIN"), 0)
qs = BaseA.objects.filter(
Q(a__f1="foo") | Q(a__f1="bar") & (Q(b__f1="bar") | Q(c__f1="foo"))
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion_fexpression(self):
qs = BaseA.objects.filter(Q(a__f1=F("b__f1")) | Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = BaseA.objects.filter(Q(a__f1=F("c__f1")) | Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
qs = BaseA.objects.filter(
Q(a__f1=F("b__f1")) | Q(a__f2=F("b__f2")) | Q(c__f1="foo")
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
qs = BaseA.objects.filter(Q(a__f1=F("c__f1")) | (Q(pk=1) & Q(pk=2)))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count("INNER JOIN"), 0)
class ManyToManyExcludeTest(TestCase):
def test_exclude_many_to_many(self):
i_extra = Identifier.objects.create(name="extra")
i_program = Identifier.objects.create(name="program")
program = Program.objects.create(identifier=i_program)
i_channel = Identifier.objects.create(name="channel")
channel = Channel.objects.create(identifier=i_channel)
channel.programs.add(program)
        # channel contains the program, so all Identifiers except the
        # program's should be returned.
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=channel).order_by("name"),
[i_channel, i_extra],
)
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=None).order_by("name"),
[i_program],
)
def test_ticket_12823(self):
pg3 = Page.objects.create(text="pg3")
pg2 = Page.objects.create(text="pg2")
pg1 = Page.objects.create(text="pg1")
pa1 = Paragraph.objects.create(text="pa1")
pa1.page.set([pg1, pg2])
pa2 = Paragraph.objects.create(text="pa2")
pa2.page.set([pg2, pg3])
pa3 = Paragraph.objects.create(text="pa3")
ch1 = Chapter.objects.create(title="ch1", paragraph=pa1)
ch2 = Chapter.objects.create(title="ch2", paragraph=pa2)
ch3 = Chapter.objects.create(title="ch3", paragraph=pa3)
b1 = Book.objects.create(title="b1", chapter=ch1)
b2 = Book.objects.create(title="b2", chapter=ch2)
b3 = Book.objects.create(title="b3", chapter=ch3)
q = Book.objects.exclude(chapter__paragraph__page__text="pg1")
self.assertNotIn("IS NOT NULL", str(q.query))
self.assertEqual(len(q), 2)
self.assertNotIn(b1, q)
self.assertIn(b2, q)
self.assertIn(b3, q)
class RelabelCloneTest(TestCase):
def test_ticket_19964(self):
my1 = MyObject.objects.create(data="foo")
my1.parent = my1
my1.save()
my2 = MyObject.objects.create(data="bar", parent=my1)
parents = MyObject.objects.filter(parent=F("id"))
children = MyObject.objects.filter(parent__in=parents).exclude(parent=F("id"))
self.assertEqual(list(parents), [my1])
# Evaluating the children query (which has parents as part of it) does
# not change results for the parents query.
self.assertEqual(list(children), [my2])
self.assertEqual(list(parents), [my1])
class Ticket20101Tests(TestCase):
def test_ticket_20101(self):
"""
        Test ORed QuerySet combining when one side contains an exclude()
        subquery.
"""
t = Tag.objects.create(name="foo")
a1 = Annotation.objects.create(tag=t, name="a1")
a2 = Annotation.objects.create(tag=t, name="a2")
a3 = Annotation.objects.create(tag=t, name="a3")
n = Note.objects.create(note="foo", misc="bar")
qs1 = Note.objects.exclude(annotation__in=[a1, a2])
qs2 = Note.objects.filter(annotation__in=[a3])
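        # n has no annotations, so it matches the exclude() queryset but not
        # the filter() one; the ORed combination must still include it.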
self.assertIn(n, qs1)
self.assertNotIn(n, qs2)
self.assertIn(n, (qs1 | qs2))
class EmptyStringPromotionTests(SimpleTestCase):
def test_empty_string_promotion(self):
qs = RelatedObject.objects.filter(single__name="")
if connection.features.interprets_empty_strings_as_nulls:
self.assertIn("LEFT OUTER JOIN", str(qs.query))
else:
self.assertNotIn("LEFT OUTER JOIN", str(qs.query))
class ValuesSubqueryTests(TestCase):
def test_values_in_subquery(self):
# If a values() queryset is used, then the given values
# will be used instead of forcing use of the relation's field.
o1 = Order.objects.create(id=-2)
o2 = Order.objects.create(id=-1)
oi1 = OrderItem.objects.create(order=o1, status=0)
oi1.status = oi1.pk
oi1.save()
OrderItem.objects.create(order=o2, status=0)
# The query below should match o1 as it has related order_item
# with id == status.
self.assertSequenceEqual(
Order.objects.filter(items__in=OrderItem.objects.values_list("status")),
[o1],
)
class DoubleInSubqueryTests(TestCase):
def test_double_subquery_in(self):
lfa1 = LeafA.objects.create(data="foo")
lfa2 = LeafA.objects.create(data="bar")
lfb1 = LeafB.objects.create(data="lfb1")
lfb2 = LeafB.objects.create(data="lfb2")
Join.objects.create(a=lfa1, b=lfb1)
Join.objects.create(a=lfa2, b=lfb2)
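        # The pk values flow through two nested values_list() subqueries
        # (LeafA -> Join -> LeafB); only lfb1 is reachable from the matching
        # LeafA row.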
leaf_as = LeafA.objects.filter(data="foo").values_list("pk", flat=True)
joins = Join.objects.filter(a__in=leaf_as).values_list("b__id", flat=True)
qs = LeafB.objects.filter(pk__in=joins)
self.assertSequenceEqual(qs, [lfb1])
class Ticket18785Tests(SimpleTestCase):
def test_ticket_18785(self):
        # Test join trimming from ticket #18785.
qs = (
Item.objects.exclude(note__isnull=False)
.filter(name="something", creator__extra__isnull=True)
.order_by()
)
self.assertEqual(1, str(qs.query).count("INNER JOIN"))
self.assertEqual(0, str(qs.query).count("OUTER JOIN"))
class Ticket20788Tests(TestCase):
def test_ticket_20788(self):
Paragraph.objects.create()
paragraph = Paragraph.objects.create()
page = paragraph.page.create()
chapter = Chapter.objects.create(paragraph=paragraph)
Book.objects.create(chapter=chapter)
paragraph2 = Paragraph.objects.create()
Page.objects.create()
chapter2 = Chapter.objects.create(paragraph=paragraph2)
book2 = Book.objects.create(chapter=chapter2)
sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page)
self.assertSequenceEqual(sentences_not_in_pub, [book2])
class Ticket12807Tests(TestCase):
def test_ticket_12807(self):
p1 = Paragraph.objects.create()
p2 = Paragraph.objects.create()
# The ORed condition below should have no effect on the query - the
# ~Q(pk__in=[]) will always be True.
qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk))
self.assertSequenceEqual(qs, [p1])
class RelatedLookupTypeTests(TestCase):
error = 'Cannot query "%s": Must be "%s" instance.'
@classmethod
def setUpTestData(cls):
cls.oa = ObjectA.objects.create(name="oa")
cls.poa = ProxyObjectA.objects.get(name="oa")
cls.coa = ChildObjectA.objects.create(name="coa")
cls.wrong_type = Order.objects.create(id=cls.oa.pk)
cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1)
cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2)
cls.pob = ProxyObjectB.objects.all()
cls.c = ObjectC.objects.create(childobjecta=cls.coa)
def test_wrong_type_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup.
"""
# Passing incorrect object type
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.get(objecta=self.wrong_type)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta__in=[self.wrong_type])
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta=self.wrong_type)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)
):
ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob])
# Passing an object of the class on which query is done.
with self.assertRaisesMessage(
ValueError, self.error % (self.ob, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta__in=[self.poa, self.ob])
with self.assertRaisesMessage(
ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)
):
ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob])
def test_wrong_backward_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup for backward relations.
"""
with self.assertRaisesMessage(
ValueError, self.error % (self.oa, ObjectB._meta.object_name)
):
ObjectA.objects.filter(objectb__in=[self.oa, self.ob])
with self.assertRaisesMessage(
ValueError, self.error % (self.oa, ObjectB._meta.object_name)
):
ObjectA.objects.exclude(objectb=self.oa)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)
):
ObjectA.objects.get(objectb=self.wrong_type)
def test_correct_lookup(self):
"""
When passing proxy model objects, child objects, or parent objects,
lookups work fine.
"""
out_a = [self.oa]
out_b = [self.ob, self.pob1]
out_c = [self.c]
# proxy model objects
self.assertSequenceEqual(
ObjectB.objects.filter(objecta=self.poa).order_by("name"), out_b
)
self.assertSequenceEqual(
ObjectA.objects.filter(objectb__in=self.pob).order_by("pk"), out_a * 2
)
# child objects
self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), [])
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by("name"),
out_b,
)
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by(
"name"
),
out_b,
)
# parent objects
self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c)
# QuerySet related object type checking shouldn't issue queries
# (the querysets aren't evaluated here, hence zero queries) (#23266).
with self.assertNumQueries(0):
ObjectB.objects.filter(objecta__in=ObjectA.objects.all())
def test_values_queryset_lookup(self):
"""
ValueQuerySets are not checked for compatibility with the lookup field.
"""
# Make sure the num and objecta field values match.
ob = ObjectB.objects.get(name="ob")
ob.num = ob.objecta.pk
ob.save()
pob = ObjectB.objects.get(name="pob")
pob.num = pob.objecta.pk
pob.save()
self.assertSequenceEqual(
ObjectB.objects.filter(
objecta__in=ObjectB.objects.values_list("num")
).order_by("pk"),
[ob, pob],
)
class Ticket14056Tests(TestCase):
def test_ticket_14056(self):
s1 = SharedConnection.objects.create(data="s1")
s2 = SharedConnection.objects.create(data="s2")
s3 = SharedConnection.objects.create(data="s3")
PointerA.objects.create(connection=s2)
expected_ordering = (
[s1, s3, s2] if connection.features.nulls_order_largest else [s2, s1, s3]
)
self.assertSequenceEqual(
SharedConnection.objects.order_by("-pointera__connection", "pk"),
expected_ordering,
)
class Ticket20955Tests(TestCase):
def test_ticket_20955(self):
jack = Staff.objects.create(name="jackstaff")
jackstaff = StaffUser.objects.create(staff=jack)
jill = Staff.objects.create(name="jillstaff")
jillstaff = StaffUser.objects.create(staff=jill)
task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task")
task_get = Task.objects.get(pk=task.pk)
# Load data so that assertNumQueries doesn't complain about the get
# version's queries.
task_get.creator.staffuser.staff
task_get.owner.staffuser.staff
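        # The two select_related() chains need three joins each, six in
        # total, as asserted below.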
qs = Task.objects.select_related(
"creator__staffuser__staff", "owner__staffuser__staff"
)
self.assertEqual(str(qs.query).count(" JOIN "), 6)
task_select_related = qs.get(pk=task.pk)
with self.assertNumQueries(0):
self.assertEqual(
task_select_related.creator.staffuser.staff,
task_get.creator.staffuser.staff,
)
self.assertEqual(
task_select_related.owner.staffuser.staff,
task_get.owner.staffuser.staff,
)
class Ticket21203Tests(TestCase):
def test_ticket_21203(self):
p = Ticket21203Parent.objects.create(parent_bool=True)
c = Ticket21203Child.objects.create(parent=p)
qs = Ticket21203Child.objects.select_related("parent").defer("parent__created")
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].parent.parent_bool, True)
class ValuesJoinPromotionTests(TestCase):
def test_values_no_promotion_for_existing(self):
qs = Node.objects.filter(parent__parent__isnull=False)
self.assertIn(" INNER JOIN ", str(qs.query))
qs = qs.values("parent__parent__id")
self.assertIn(" INNER JOIN ", str(qs.query))
# Make sure there is a left outer join without the filter.
qs = Node.objects.values("parent__parent__id")
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = ObjectB.objects.values("objecta__name")
self.assertIn(" INNER JOIN ", str(qs.query))
def test_ticket_21376(self):
a = ObjectA.objects.create()
ObjectC.objects.create(objecta=a)
qs = ObjectC.objects.filter(
Q(objecta=a) | Q(objectb__objecta=a),
)
qs = qs.filter(
Q(objectb=1) | Q(objecta=a),
)
self.assertEqual(qs.count(), 1)
tblname = connection.ops.quote_name(ObjectB._meta.db_table)
self.assertIn(" LEFT OUTER JOIN %s" % tblname, str(qs.query))
class ForeignKeyToBaseExcludeTests(TestCase):
def test_ticket_21787(self):
sc1 = SpecialCategory.objects.create(special_name="sc1", name="sc1")
sc2 = SpecialCategory.objects.create(special_name="sc2", name="sc2")
sc3 = SpecialCategory.objects.create(special_name="sc3", name="sc3")
c1 = CategoryItem.objects.create(category=sc1)
CategoryItem.objects.create(category=sc2)
self.assertSequenceEqual(
SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by("name"),
[sc2, sc3],
)
self.assertSequenceEqual(
SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1]
)
class ReverseM2MCustomPkTests(TestCase):
def test_ticket_21879(self):
cpt1 = CustomPkTag.objects.create(id="cpt1", tag="cpt1")
cp1 = CustomPk.objects.create(name="cp1", extra="extra")
cp1.custompktag_set.add(cpt1)
self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1])
self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1])
class Ticket22429Tests(TestCase):
def test_ticket_22429(self):
sc1 = School.objects.create()
st1 = Student.objects.create(school=sc1)
sc2 = School.objects.create()
st2 = Student.objects.create(school=sc2)
cr = Classroom.objects.create(school=sc1)
cr.students.add(st1)
queryset = Student.objects.filter(~Q(classroom__school=F("school")))
self.assertSequenceEqual(queryset, [st2])
class Ticket23605Tests(TestCase):
def test_ticket_23605(self):
# Test filtering on a complicated q-object from ticket's report.
# The query structure is such that we have multiple nested subqueries.
# The original problem was that the inner queries weren't relabeled
# correctly.
# See also #24090.
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=10000.0)
Ticket23605B.objects.create(
field_b0=10000.0, field_b1=True, modelc_fk=c1, modela_fk=a1
)
complex_q = Q(
pk__in=Ticket23605A.objects.filter(
Q(
# True for a1 as field_b0 = 10000, field_c0=10000
# False for a2 as no ticket23605b found
ticket23605b__field_b0__gte=1000000
/ F("ticket23605b__modelc_fk__field_c0")
)
&
# True for a1 (field_b1=True)
Q(ticket23605b__field_b1=True)
& ~Q(
ticket23605b__pk__in=Ticket23605B.objects.filter(
~(
# Same filters as above commented filters, but
# double-negated (one for Q() above, one for
# parentheses). So, again a1 match, a2 not.
Q(field_b1=True)
& Q(field_b0__gte=1000000 / F("modelc_fk__field_c0"))
)
)
)
).filter(ticket23605b__field_b1=True)
)
qs1 = Ticket23605A.objects.filter(complex_q)
self.assertSequenceEqual(qs1, [a1])
qs2 = Ticket23605A.objects.exclude(complex_q)
self.assertSequenceEqual(qs2, [a2])
class TestTicket24279(TestCase):
    def test_ticket_24279(self):
School.objects.create()
qs = School.objects.filter(Q(pk__in=()) | Q())
self.assertSequenceEqual(qs, [])
class TestInvalidValuesRelation(SimpleTestCase):
def test_invalid_values(self):
msg = "Field 'id' expected a number but got 'abc'."
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag="abc")
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag__in=[123, "abc"])
class TestTicket24605(TestCase):
def test_ticket_24605(self):
"""
Subquery table names should be quoted.
"""
i1 = Individual.objects.create(alive=True)
RelatedIndividual.objects.create(related=i1)
i2 = Individual.objects.create(alive=False)
RelatedIndividual.objects.create(related=i2)
i3 = Individual.objects.create(alive=True)
i4 = Individual.objects.create(alive=False)
self.assertSequenceEqual(
Individual.objects.filter(
Q(alive=False), Q(related_individual__isnull=True)
),
[i4],
)
self.assertSequenceEqual(
Individual.objects.exclude(
Q(alive=False), Q(related_individual__isnull=True)
).order_by("pk"),
[i1, i2, i3],
)
class Ticket23622Tests(TestCase):
@skipUnlessDBFeature("can_distinct_on_fields")
def test_ticket_23622(self):
"""
Make sure __pk__in and __in work the same for related fields when
using a distinct on subquery.
"""
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=0.0)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=123,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=23,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=234,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=12,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=567,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=76,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=7,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=56,
field_b1=True,
modelc_fk=c1,
)
qx = Q(
ticket23605b__pk__in=Ticket23605B.objects.order_by(
"modela_fk", "-field_b1"
).distinct("modela_fk")
) & Q(ticket23605b__field_b0__gte=300)
qy = Q(
ticket23605b__in=Ticket23605B.objects.order_by(
"modela_fk", "-field_b1"
).distinct("modela_fk")
) & Q(ticket23605b__field_b0__gte=300)
self.assertEqual(
set(Ticket23605A.objects.filter(qx).values_list("pk", flat=True)),
set(Ticket23605A.objects.filter(qy).values_list("pk", flat=True)),
)
self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])

from datetime import datetime
from django.core.exceptions import FieldError
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models import BooleanField, CharField, F, Q
from django.db.models.expressions import (
Col,
Exists,
ExpressionWrapper,
Func,
RawSQL,
Value,
)
from django.db.models.fields.related_lookups import RelatedIsNull
from django.db.models.functions import Lower
from django.db.models.lookups import Exact, GreaterThan, IsNull, LessThan
from django.db.models.sql.constants import SINGLE
from django.db.models.sql.query import JoinPromoter, Query, get_field_names_from_opts
from django.db.models.sql.where import AND, OR
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import register_lookup
from .models import Author, Item, ObjectC, Ranking
class TestQuery(SimpleTestCase):
def test_simple_query(self):
query = Query(Author)
where = query.build_where(Q(num__gt=2))
lookup = where.children[0]
self.assertIsInstance(lookup, GreaterThan)
self.assertEqual(lookup.rhs, 2)
self.assertEqual(lookup.lhs.target, Author._meta.get_field("num"))
def test_non_alias_cols_query(self):
query = Query(Author, alias_cols=False)
where = query.build_where(Q(num__gt=2, name__isnull=False) | Q(num__lt=F("id")))
name_isnull_lookup, num_gt_lookup = where.children[0].children
self.assertIsInstance(num_gt_lookup, GreaterThan)
self.assertIsInstance(num_gt_lookup.lhs, Col)
self.assertIsNone(num_gt_lookup.lhs.alias)
self.assertIsInstance(name_isnull_lookup, IsNull)
self.assertIsInstance(name_isnull_lookup.lhs, Col)
self.assertIsNone(name_isnull_lookup.lhs.alias)
num_lt_lookup = where.children[1]
self.assertIsInstance(num_lt_lookup, LessThan)
self.assertIsInstance(num_lt_lookup.rhs, Col)
self.assertIsNone(num_lt_lookup.rhs.alias)
self.assertIsInstance(num_lt_lookup.lhs, Col)
self.assertIsNone(num_lt_lookup.lhs.alias)
def test_complex_query(self):
query = Query(Author)
where = query.build_where(Q(num__gt=2) | Q(num__lt=0))
self.assertEqual(where.connector, OR)
lookup = where.children[0]
self.assertIsInstance(lookup, GreaterThan)
self.assertEqual(lookup.rhs, 2)
self.assertEqual(lookup.lhs.target, Author._meta.get_field("num"))
lookup = where.children[1]
self.assertIsInstance(lookup, LessThan)
self.assertEqual(lookup.rhs, 0)
self.assertEqual(lookup.lhs.target, Author._meta.get_field("num"))
def test_multiple_fields(self):
query = Query(Item, alias_cols=False)
where = query.build_where(Q(modified__gt=F("created")))
lookup = where.children[0]
self.assertIsInstance(lookup, GreaterThan)
self.assertIsInstance(lookup.rhs, Col)
self.assertIsNone(lookup.rhs.alias)
self.assertIsInstance(lookup.lhs, Col)
self.assertIsNone(lookup.lhs.alias)
self.assertEqual(lookup.rhs.target, Item._meta.get_field("created"))
self.assertEqual(lookup.lhs.target, Item._meta.get_field("modified"))
def test_transform(self):
query = Query(Author, alias_cols=False)
with register_lookup(CharField, Lower):
where = query.build_where(~Q(name__lower="foo"))
lookup = where.children[0]
self.assertIsInstance(lookup, Exact)
self.assertIsInstance(lookup.lhs, Lower)
self.assertIsInstance(lookup.lhs.lhs, Col)
self.assertIsNone(lookup.lhs.lhs.alias)
self.assertEqual(lookup.lhs.lhs.target, Author._meta.get_field("name"))
def test_negated_nullable(self):
query = Query(Item)
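        # Negating a lookup on a nullable column adds a companion IS NULL
        # lookup so that rows with NULL values match the negated condition.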
where = query.build_where(~Q(modified__lt=datetime(2017, 1, 1)))
self.assertTrue(where.negated)
lookup = where.children[0]
self.assertIsInstance(lookup, LessThan)
self.assertEqual(lookup.lhs.target, Item._meta.get_field("modified"))
lookup = where.children[1]
self.assertIsInstance(lookup, IsNull)
self.assertEqual(lookup.lhs.target, Item._meta.get_field("modified"))
def test_foreign_key(self):
query = Query(Item)
msg = "Joined field references are not permitted in this query"
with self.assertRaisesMessage(FieldError, msg):
query.build_where(Q(creator__num__gt=2))
def test_foreign_key_f(self):
query = Query(Ranking)
with self.assertRaises(FieldError):
query.build_where(Q(rank__gt=F("author__num")))
def test_foreign_key_exclusive(self):
query = Query(ObjectC, alias_cols=False)
where = query.build_where(Q(objecta=None) | Q(objectb=None))
a_isnull = where.children[0]
self.assertIsInstance(a_isnull, RelatedIsNull)
self.assertIsInstance(a_isnull.lhs, Col)
self.assertIsNone(a_isnull.lhs.alias)
self.assertEqual(a_isnull.lhs.target, ObjectC._meta.get_field("objecta"))
b_isnull = where.children[1]
self.assertIsInstance(b_isnull, RelatedIsNull)
self.assertIsInstance(b_isnull.lhs, Col)
self.assertIsNone(b_isnull.lhs.alias)
self.assertEqual(b_isnull.lhs.target, ObjectC._meta.get_field("objectb"))
def test_clone_select_related(self):
query = Query(Item)
query.add_select_related(["creator"])
clone = query.clone()
clone.add_select_related(["note", "creator__extra"])
self.assertEqual(query.select_related, {"creator": {}})
def test_iterable_lookup_value(self):
query = Query(Item)
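        # An iterable passed to an exact lookup on a text field is coerced to
        # its string representation, not expanded into an __in lookup.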
where = query.build_where(Q(name=["a", "b"]))
name_exact = where.children[0]
self.assertIsInstance(name_exact, Exact)
self.assertEqual(name_exact.rhs, "['a', 'b']")
def test_filter_conditional(self):
query = Query(Item)
where = query.build_where(Func(output_field=BooleanField()))
exact = where.children[0]
self.assertIsInstance(exact, Exact)
self.assertIsInstance(exact.lhs, Func)
self.assertIs(exact.rhs, True)
def test_filter_conditional_join(self):
query = Query(Item)
filter_expr = Func("note__note", output_field=BooleanField())
msg = "Joined field references are not permitted in this query"
with self.assertRaisesMessage(FieldError, msg):
query.build_where(filter_expr)
def test_filter_non_conditional(self):
query = Query(Item)
msg = "Cannot filter against a non-conditional expression."
with self.assertRaisesMessage(TypeError, msg):
query.build_where(Func(output_field=CharField()))
class TestQueryNoModel(TestCase):
def test_rawsql_annotation(self):
query = Query(None)
sql = "%s = 1"
        # Wrap with a CASE WHEN expression if the database backend (e.g.
        # Oracle) doesn't support boolean expressions in the SELECT list.
if not connection.features.supports_boolean_expr_in_select_clause:
sql = f"CASE WHEN {sql} THEN 1 ELSE 0 END"
query.add_annotation(RawSQL(sql, (1,), BooleanField()), "_check")
result = query.get_compiler(using=DEFAULT_DB_ALIAS).execute_sql(SINGLE)
self.assertEqual(result[0], 1)
def test_subquery_annotation(self):
query = Query(None)
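        # No Item rows exist in this test, so the EXISTS annotation evaluates
        # to false (0).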
query.add_annotation(Exists(Item.objects.all()), "_check")
result = query.get_compiler(using=DEFAULT_DB_ALIAS).execute_sql(SINGLE)
self.assertEqual(result[0], 0)
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_q_annotation(self):
query = Query(None)
check = ExpressionWrapper(
Q(RawSQL("%s = 1", (1,), BooleanField())) | Q(Exists(Item.objects.all())),
BooleanField(),
)
query.add_annotation(check, "_check")
result = query.get_compiler(using=DEFAULT_DB_ALIAS).execute_sql(SINGLE)
self.assertEqual(result[0], 1)
def test_names_to_path_field(self):
query = Query(None)
query.add_annotation(Value(True), "value")
path, final_field, targets, names = query.names_to_path(["value"], opts=None)
self.assertEqual(path, [])
self.assertIsInstance(final_field, BooleanField)
self.assertEqual(len(targets), 1)
self.assertIsInstance(targets[0], BooleanField)
self.assertEqual(names, [])
def test_names_to_path_field_error(self):
query = Query(None)
msg = "Cannot resolve keyword 'nonexistent' into field."
with self.assertRaisesMessage(FieldError, msg):
query.names_to_path(["nonexistent"], opts=None)
def test_get_field_names_from_opts(self):
self.assertEqual(get_field_names_from_opts(None), set())
class JoinPromoterTest(SimpleTestCase):
def test_repr(self):
self.assertEqual(
repr(JoinPromoter(AND, 3, True)),
"JoinPromoter(connector='AND', num_children=3, negated=True)",
)

import os
from datetime import date
from django.contrib.sitemaps import Sitemap
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.test import ignore_warnings, modify_settings, override_settings
from django.utils import translation
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.formats import localize
from .base import SitemapTestsBase
from .models import I18nTestModel, TestModel
class HTTPSitemapTests(SitemapTestsBase):
use_sitemap_err_msg = (
"To use sitemaps, either enable the sites framework or pass a "
"Site/RequestSite object in your view."
)
def test_simple_sitemap_index(self):
"A simple sitemap index can be rendered"
response = self.client.get("/simple/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_sitemap_not_callable(self):
"""A sitemap may not be callable."""
response = self.client.get("/simple-not-callable/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_paged_sitemap(self):
"""A sitemap may have multiple pages."""
response = self.client.get("/simple-paged/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>{0}/simple/sitemap-simple.xml</loc><lastmod>{1}</lastmod></sitemap><sitemap><loc>{0}/simple/sitemap-simple.xml?p=2</loc><lastmod>{1}</lastmod></sitemap>
</sitemapindex>
""".format(
self.base_url, date.today()
)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
}
]
)
def test_simple_sitemap_custom_lastmod_index(self):
"A simple sitemap index can be rendered with a custom template"
response = self.client.get("/simple/custom-lastmod-index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_simple_sitemap_section(self):
"A simple sitemap section can be rendered"
response = self.client.get("/simple/sitemap-simple.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_no_section(self):
response = self.client.get("/simple/sitemap-simple2.xml")
self.assertEqual(
str(response.context["exception"]),
"No sitemap available for section: 'simple2'",
)
self.assertEqual(response.status_code, 404)
def test_empty_page(self):
response = self.client.get("/simple/sitemap-simple.xml?p=0")
self.assertEqual(str(response.context["exception"]), "Page 0 empty")
self.assertEqual(response.status_code, 404)
def test_page_not_int(self):
response = self.client.get("/simple/sitemap-simple.xml?p=test")
self.assertEqual(str(response.context["exception"]), "No page 'test'")
self.assertEqual(response.status_code, 404)
def test_simple_sitemap(self):
"A simple sitemap can be rendered"
response = self.client.get("/simple/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
}
]
)
def test_simple_custom_sitemap(self):
"A simple sitemap can be rendered with a custom template"
response = self.client.get("/simple/custom-sitemap.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_sitemap_last_modified(self):
"Last-Modified header is set correctly"
response = self.client.get("/lastmod/sitemap.xml")
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 10:00:00 GMT"
)
def test_sitemap_last_modified_date(self):
"""
        The Last-Modified header should support dates (without time).
"""
response = self.client.get("/lastmod/date-sitemap.xml")
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 00:00:00 GMT"
)
def test_sitemap_last_modified_tz(self):
"""
The Last-Modified header should be converted from timezone aware dates
to GMT.
"""
response = self.client.get("/lastmod/tz-sitemap.xml")
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 15:00:00 GMT"
)
def test_sitemap_last_modified_missing(self):
"Last-Modified header is missing when sitemap has no lastmod"
response = self.client.get("/generic/sitemap.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemap_last_modified_mixed(self):
"Last-Modified header is omitted when lastmod not on all items"
response = self.client.get("/lastmod-mixed/sitemap.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemaps_lastmod_mixed_ascending_last_modified_missing(self):
"""
The Last-Modified header is omitted when lastmod isn't found in all
sitemaps. Test sitemaps are sorted by lastmod in ascending order.
"""
response = self.client.get("/lastmod-sitemaps/mixed-ascending.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemaps_lastmod_mixed_descending_last_modified_missing(self):
"""
The Last-Modified header is omitted when lastmod isn't found in all
sitemaps. Test sitemaps are sorted by lastmod in descending order.
"""
response = self.client.get("/lastmod-sitemaps/mixed-descending.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemaps_lastmod_ascending(self):
"""
The Last-Modified header is set to the most recent sitemap lastmod.
Test sitemaps are sorted by lastmod in ascending order.
"""
response = self.client.get("/lastmod-sitemaps/ascending.xml")
self.assertEqual(
response.headers["Last-Modified"], "Sat, 20 Apr 2013 05:00:00 GMT"
)
def test_sitemaps_lastmod_descending(self):
"""
The Last-Modified header is set to the most recent sitemap lastmod.
Test sitemaps are sorted by lastmod in descending order.
"""
response = self.client.get("/lastmod-sitemaps/descending.xml")
self.assertEqual(
response.headers["Last-Modified"], "Sat, 20 Apr 2013 05:00:00 GMT"
)
def test_sitemap_get_latest_lastmod_none(self):
"""
        sitemapindex.lastmod is omitted when Sitemap.lastmod is callable and
        Sitemap.get_latest_lastmod() is not implemented.
"""
response = self.client.get("/lastmod/get-latest-lastmod-none-sitemap.xml")
self.assertNotContains(response, "<lastmod>")
def test_sitemap_get_latest_lastmod(self):
"""
        sitemapindex.lastmod is included when Sitemap.lastmod is an attribute
        and Sitemap.get_latest_lastmod() is implemented.
"""
response = self.client.get("/lastmod/get-latest-lastmod-sitemap.xml")
self.assertContains(response, "<lastmod>2013-03-13T10:00:00</lastmod>")
def test_sitemap_latest_lastmod_timezone(self):
"""
lastmod datestamp shows timezones if Sitemap.get_latest_lastmod
returns an aware datetime.
"""
response = self.client.get("/lastmod/latest-lastmod-timezone-sitemap.xml")
self.assertContains(response, "<lastmod>2013-03-13T10:00:00-05:00</lastmod>")
def test_localized_priority(self):
"""The priority value should not be localized."""
with translation.override("fr"):
self.assertEqual("0,3", localize(0.3))
# Priorities aren't rendered in localized format.
response = self.client.get("/simple/sitemap.xml")
self.assertContains(response, "<priority>0.5</priority>")
self.assertContains(response, "<lastmod>%s</lastmod>" % date.today())
@modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"})
def test_requestsite_sitemap(self):
# Hitting the flatpages sitemap without the sites framework installed
# doesn't raise an exception.
response = self.client.get("/simple/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>http://testserver/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % date.today()
self.assertXMLEqual(response.content.decode(), expected_content)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_sitemap_get_urls_no_site_1(self):
"""
        ImproperlyConfigured is raised if a site object isn't passed to
        Sitemap.get_urls() and no Site objects exist.
"""
Site.objects.all().delete()
with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg):
Sitemap().get_urls()
@modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"})
@ignore_warnings(category=RemovedInDjango50Warning)
def test_sitemap_get_urls_no_site_2(self):
"""
        ImproperlyConfigured is raised if a site object isn't passed to
        Sitemap.get_urls() when Site objects exist but the sites framework
        isn't actually installed.
"""
with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg):
Sitemap().get_urls()
@ignore_warnings(category=RemovedInDjango50Warning)
def test_sitemap_item(self):
"""
        The raw item is included with each URL result from
        Sitemap.get_urls().
"""
test_sitemap = Sitemap()
test_sitemap.items = TestModel.objects.order_by("pk").all
def is_testmodel(url):
return isinstance(url["item"], TestModel)
item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls()))
self.assertTrue(item_in_url_info)
def test_cached_sitemap_index(self):
"""
A cached sitemap index can be rendered (#2713).
"""
response = self.client.get("/cached/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/cached/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_x_robots_sitemap(self):
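        # Both the index and section views should set the X-Robots-Tag header
        # so that sitemap pages themselves aren't indexed by crawlers.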
response = self.client.get("/simple/index.xml")
self.assertEqual(response.headers["X-Robots-Tag"], "noindex, noodp, noarchive")
response = self.client.get("/simple/sitemap.xml")
self.assertEqual(response.headers["X-Robots-Tag"], "noindex, noodp, noarchive")
def test_empty_sitemap(self):
response = self.client.get("/empty/sitemap.xml")
self.assertEqual(response.status_code, 200)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_simple_i18n_sitemap_index(self):
"""
A simple i18n sitemap index can be rendered, without logging variable
lookup errors.
"""
with self.assertNoLogs("django.template", "DEBUG"):
response = self.client.get("/simple/i18n.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>{0}/en/i18n/testmodel/{1}/</loc><changefreq>never</changefreq>"
"<priority>0.5</priority></url><url><loc>{0}/pt/i18n/testmodel/{1}/</loc>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
).format(self.base_url, self.i18n_model.pk)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_alternate_i18n_sitemap_index(self):
"""
        An i18n sitemap with alternate/hreflang links can be rendered.
"""
response = self.client.get("/alternates/i18n.xml")
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
</url>
""".replace(
"\n", ""
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(
LANGUAGES=(("en", "English"), ("pt", "Portuguese"), ("es", "Spanish"))
)
def test_alternate_i18n_sitemap_limited(self):
"""
        An i18n sitemap with a limited set of languages can be rendered.
"""
response = self.client.get("/limited/i18n.xml")
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/es/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/>
</url>
""".replace(
"\n", ""
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_alternate_i18n_sitemap_xdefault(self):
"""
        An i18n sitemap with x-default alternates can be rendered.
"""
response = self.client.get("/x-default/i18n.xml")
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/>
</url>
""".replace(
"\n", ""
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_language_for_item_i18n_sitemap(self):
"""
        An i18n sitemap in which each item can choose whether it's displayed
        for a given language.
"""
only_pt = I18nTestModel.objects.create(name="Only for PT")
response = self.client.get("/item-by-lang/i18n.xml")
url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk
expected_urls = (
f"<url><loc>{url}/en/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority></url>"
f"<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority></url>"
f"<url><loc>{url}/pt/i18n/testmodel/{only_pt_pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority></url>"
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_alternate_language_for_item_i18n_sitemap(self):
"""
        An i18n sitemap in which each item can choose whether it's displayed
        for a given language, with alternate/hreflang links.
"""
only_pt = I18nTestModel.objects.create(name="Only for PT")
response = self.client.get("/item-by-lang-alternates/i18n.xml")
url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk
expected_urls = (
f"<url><loc>{url}/en/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority>"
f'<xhtml:link rel="alternate" '
f'hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/></url>'
f"<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority>"
f'<xhtml:link rel="alternate" '
f'hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/></url>'
f"<url><loc>{url}/pt/i18n/testmodel/{only_pt_pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority>"
f'<xhtml:link rel="alternate" '
f'hreflang="pt" href="{url}/pt/i18n/testmodel/{only_pt_pk}/"/></url>'
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.content.decode(), expected_content)
def test_sitemap_without_entries(self):
response = self.client.get("/sitemap-without-entries/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n\n'
"</urlset>"
)
self.assertXMLEqual(response.content.decode(), expected_content)
    def test_callable_lastmod_partial(self):
"""
        Not all items have `lastmod`, so neither the detail nor the index
        sitemap view sets the `Last-Modified` header.
"""
index_response = self.client.get("/callable-lastmod-partial/index.xml")
sitemap_response = self.client.get("/callable-lastmod-partial/sitemap.xml")
self.assertNotIn("Last-Modified", index_response)
self.assertNotIn("Last-Modified", sitemap_response)
expected_content_index = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc></sitemap>
</sitemapindex>
"""
expected_content_sitemap = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>http://example.com/location/</loc>"
"<lastmod>2013-03-13</lastmod></url><url>"
"<loc>http://example.com/location/</loc></url>\n"
"</urlset>"
)
self.assertXMLEqual(index_response.content.decode(), expected_content_index)
self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap)
    def test_callable_lastmod_full(self):
"""
        All items in the sitemap have `lastmod`, so the `Last-Modified` header
        is set for both the detail and the index sitemap view.
"""
index_response = self.client.get("/callable-lastmod-full/index.xml")
sitemap_response = self.client.get("/callable-lastmod-full/sitemap.xml")
self.assertEqual(
index_response.headers["Last-Modified"], "Thu, 13 Mar 2014 10:00:00 GMT"
)
self.assertEqual(
sitemap_response.headers["Last-Modified"], "Thu, 13 Mar 2014 10:00:00 GMT"
)
expected_content_index = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc><lastmod>2014-03-13T10:00:00</lastmod></sitemap>
</sitemapindex>
"""
expected_content_sitemap = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>http://example.com/location/</loc>"
"<lastmod>2013-03-13</lastmod></url>"
"<url><loc>http://example.com/location/</loc>"
"<lastmod>2014-03-13</lastmod></url>\n"
"</urlset>"
)
self.assertXMLEqual(index_response.content.decode(), expected_content_index)
self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap)
    def test_callable_lastmod_no_items(self):
index_response = self.client.get("/callable-lastmod-no-items/index.xml")
self.assertNotIn("Last-Modified", index_response)
expected_content_index = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc></sitemap>
</sitemapindex>
"""
self.assertXMLEqual(index_response.content.decode(), expected_content_index)
# RemovedInDjango50Warning
class DeprecatedTests(SitemapTestsBase):
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
}
]
)
def test_simple_sitemap_custom_index_warning(self):
msg = (
"Calling `__str__` on SitemapIndexItem is deprecated, use the `location` "
"attribute instead."
)
with self.assertRaisesMessage(RemovedInDjango50Warning, msg):
self.client.get("/simple/custom-index.xml")
@ignore_warnings(category=RemovedInDjango50Warning)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
}
]
)
def test_simple_sitemap_custom_index(self):
"A simple sitemap index can be rendered with a custom template"
response = self.client.get("/simple/custom-index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % (
self.base_url
)
self.assertXMLEqual(response.content.decode(), expected_content)
|
b0c5aae0cca6112d8204f0e2409267214b8a7e689983aef35ebafa503cb7577d | """Tests for django.db.utils."""
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.db import DEFAULT_DB_ALIAS, ProgrammingError, connection
from django.db.utils import ConnectionHandler, load_backend
from django.test import SimpleTestCase, TestCase
from django.utils.connection import ConnectionDoesNotExist
class ConnectionHandlerTests(SimpleTestCase):
def test_connection_handler_no_databases(self):
"""
Empty DATABASES and empty 'default' settings default to the dummy
backend.
"""
for DATABASES in (
{}, # Empty DATABASES setting.
{"default": {}}, # Empty 'default' database.
):
with self.subTest(DATABASES=DATABASES):
self.assertImproperlyConfigured(DATABASES)
def assertImproperlyConfigured(self, DATABASES):
conns = ConnectionHandler(DATABASES)
self.assertEqual(
conns[DEFAULT_DB_ALIAS].settings_dict["ENGINE"], "django.db.backends.dummy"
)
msg = (
"settings.DATABASES is improperly configured. Please supply the "
"ENGINE value. Check settings documentation for more details."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
conns[DEFAULT_DB_ALIAS].ensure_connection()
def test_no_default_database(self):
DATABASES = {"other": {}}
conns = ConnectionHandler(DATABASES)
msg = "You must define a 'default' database."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
conns["other"].ensure_connection()
def test_databases_property(self):
# The "databases" property is maintained for backwards compatibility
# with 3rd party packages. It should be an alias of the "settings"
# property.
conn = ConnectionHandler({})
self.assertNotEqual(conn.settings, {})
self.assertEqual(conn.settings, conn.databases)
def test_nonexistent_alias(self):
msg = "The connection 'nonexistent' doesn't exist."
conns = ConnectionHandler(
{
DEFAULT_DB_ALIAS: {"ENGINE": "django.db.backends.dummy"},
}
)
with self.assertRaisesMessage(ConnectionDoesNotExist, msg):
conns["nonexistent"]
class DatabaseErrorWrapperTests(TestCase):
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL test")
def test_reraising_backend_specific_database_exception(self):
from django.db.backends.postgresql.psycopg_any import is_psycopg3
with connection.cursor() as cursor:
msg = 'table "X" does not exist'
with self.assertRaisesMessage(ProgrammingError, msg) as cm:
cursor.execute('DROP TABLE "X"')
self.assertNotEqual(type(cm.exception), type(cm.exception.__cause__))
self.assertIsNotNone(cm.exception.__cause__)
if is_psycopg3:
self.assertIsNotNone(cm.exception.__cause__.diag.sqlstate)
self.assertIsNotNone(cm.exception.__cause__.diag.message_primary)
else:
self.assertIsNotNone(cm.exception.__cause__.pgcode)
self.assertIsNotNone(cm.exception.__cause__.pgerror)
class LoadBackendTests(SimpleTestCase):
def test_load_backend_invalid_name(self):
msg = (
"'foo' isn't an available database backend or couldn't be "
"imported. Check the above exception. To use one of the built-in "
"backends, use 'django.db.backends.XXX', where XXX is one of:\n"
" 'mysql', 'oracle', 'postgresql', 'sqlite3'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg) as cm:
load_backend("foo")
self.assertEqual(str(cm.exception.__cause__), "No module named 'foo'")
|
072a26facb7c07b247f84398745858603a3184f924ff5ceda8e59b62c3c9ddb7 | """
Testing using the Test Client
The test client is a class that can act like a simple
browser for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
``Client`` objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the ``Client`` instance.
This is not intended as a replacement for Twill, Selenium, or
other browser automation frameworks - it is here to allow
testing against the contexts and templates produced by a view,
rather than the HTML rendered to the end-user.
"""
import itertools
import pickle
import tempfile
from unittest import mock
from django.contrib.auth.models import User
from django.core import mail
from django.http import HttpResponse, HttpResponseNotAllowed
from django.test import (
AsyncRequestFactory,
Client,
RequestFactory,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.urls import reverse_lazy
from django.utils.decorators import async_only_middleware
from django.views.generic import RedirectView
from .views import TwoArgException, get_view, post_view, trace_view
def middleware_urlconf(get_response):
def middleware(request):
request.urlconf = "test_client.urls_middleware_urlconf"
return get_response(request)
return middleware
@async_only_middleware
def async_middleware_urlconf(get_response):
async def middleware(request):
request.urlconf = "test_client.urls_middleware_urlconf"
return await get_response(request)
return middleware
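# Both factories above set ``request.urlconf``, which makes URL resolution
# for that single request use "test_client.urls_middleware_urlconf" instead
# of the ROOT_URLCONF; the resolver_match tests below rely on this.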
@override_settings(ROOT_URLCONF="test_client.urls")
class ClientTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username="testclient", password="password")
cls.u2 = User.objects.create_user(
username="inactive", password="password", is_active=False
)
def test_get_view(self):
"GET a view"
# The data is ignored, but let's check it doesn't crash the system
# anyway.
data = {"var": "\xf2"}
response = self.client.get("/get_view/", data)
# Check some response details
self.assertContains(response, "This is a test")
self.assertEqual(response.context["var"], "\xf2")
self.assertEqual(response.templates[0].name, "GET Template")
def test_pickling_response(self):
tests = ["/cbv_view/", "/get_view/"]
for url in tests:
with self.subTest(url=url):
response = self.client.get(url)
dump = pickle.dumps(response)
response_from_pickle = pickle.loads(dump)
self.assertEqual(repr(response), repr(response_from_pickle))
async def test_pickling_response_async(self):
response = await self.async_client.get("/async_get_view/")
dump = pickle.dumps(response)
response_from_pickle = pickle.loads(dump)
self.assertEqual(repr(response), repr(response_from_pickle))
def test_query_string_encoding(self):
# WSGI requires latin-1 encoded strings.
response = self.client.get("/get_view/?var=1\ufffd")
self.assertEqual(response.context["var"], "1\ufffd")
def test_get_data_none(self):
msg = (
"Cannot encode None for key 'value' in a query string. Did you "
"mean to pass an empty string or omit the value?"
)
with self.assertRaisesMessage(TypeError, msg):
self.client.get("/get_view/", {"value": None})
def test_get_post_view(self):
"GET a view that normally expects POSTs"
response = self.client.get("/post_view/", {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Empty GET Template")
self.assertTemplateUsed(response, "Empty GET Template")
self.assertTemplateNotUsed(response, "Empty POST Template")
def test_empty_post(self):
"POST an empty dictionary to a view"
response = self.client.post("/post_view/", {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Empty POST Template")
self.assertTemplateNotUsed(response, "Empty GET Template")
self.assertTemplateUsed(response, "Empty POST Template")
def test_post(self):
"POST some data to a view"
post_data = {"value": 37}
response = self.client.post("/post_view/", post_data)
# Check some response details
self.assertContains(response, "Data received")
self.assertEqual(response.context["data"], "37")
self.assertEqual(response.templates[0].name, "POST Template")
def test_post_data_none(self):
msg = (
"Cannot encode None for key 'value' as POST data. Did you mean "
"to pass an empty string or omit the value?"
)
with self.assertRaisesMessage(TypeError, msg):
self.client.post("/post_view/", {"value": None})
def test_json_serialization(self):
"""The test client serializes JSON data."""
methods = ("post", "put", "patch", "delete")
tests = (
({"value": 37}, {"value": 37}),
([37, True], [37, True]),
((37, False), [37, False]),
)
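        # Note: tuples are serialized as JSON arrays, which is why
        # (37, False) above is expected back as [37, False].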
for method in methods:
with self.subTest(method=method):
for data, expected in tests:
with self.subTest(data):
client_method = getattr(self.client, method)
method_name = method.upper()
response = client_method(
"/json_view/", data, content_type="application/json"
)
self.assertContains(response, "Viewing %s page." % method_name)
self.assertEqual(response.context["data"], expected)
def test_json_encoder_argument(self):
"""The test Client accepts a json_encoder."""
mock_encoder = mock.MagicMock()
mock_encoding = mock.MagicMock()
mock_encoder.return_value = mock_encoding
mock_encoding.encode.return_value = '{"value": 37}'
client = self.client_class(json_encoder=mock_encoder)
        # Vendor-tree JSON content types (application/vnd.*) are accepted.
client.post(
"/json_view/", {"value": 37}, content_type="application/vnd.api+json"
)
self.assertTrue(mock_encoder.called)
self.assertTrue(mock_encoding.encode.called)
def test_put(self):
response = self.client.put("/put_view/", {"foo": "bar"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "PUT Template")
self.assertEqual(response.context["data"], "{'foo': 'bar'}")
self.assertEqual(response.context["Content-Length"], "14")
def test_trace(self):
"""TRACE a view"""
response = self.client.trace("/trace_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["method"], "TRACE")
self.assertEqual(response.templates[0].name, "TRACE Template")
def test_response_headers(self):
"Check the value of HTTP headers returned in a response"
response = self.client.get("/header_view/")
self.assertEqual(response.headers["X-DJANGO-TEST"], "Slartibartfast")
def test_response_attached_request(self):
"""
The returned response has a ``request`` attribute with the originating
environ dict and a ``wsgi_request`` with the originating WSGIRequest.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, "request"))
self.assertTrue(hasattr(response, "wsgi_request"))
for key, value in response.request.items():
self.assertIn(key, response.wsgi_request.environ)
self.assertEqual(response.wsgi_request.environ[key], value)
def test_response_resolver_match(self):
"""
The response contains a ResolverMatch instance.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, "resolver_match"))
def test_response_resolver_match_redirect_follow(self):
"""
The response ResolverMatch instance contains the correct
information when following redirects.
"""
response = self.client.get("/redirect_view/", follow=True)
self.assertEqual(response.resolver_match.url_name, "get_view")
def test_response_resolver_match_regular_view(self):
"""
The response ResolverMatch instance contains the correct
information when accessing a regular view.
"""
response = self.client.get("/get_view/")
self.assertEqual(response.resolver_match.url_name, "get_view")
def test_response_resolver_match_class_based_view(self):
"""
The response ResolverMatch instance can be used to access the CBV view
class.
"""
response = self.client.get("/accounts/")
self.assertIs(response.resolver_match.func.view_class, RedirectView)
@modify_settings(MIDDLEWARE={"prepend": "test_client.tests.middleware_urlconf"})
def test_response_resolver_match_middleware_urlconf(self):
response = self.client.get("/middleware_urlconf_view/")
self.assertEqual(response.resolver_match.url_name, "middleware_urlconf_view")
def test_raw_post(self):
"POST raw data (with a content type) to a view"
test_doc = """<?xml version="1.0" encoding="utf-8"?>
<library><book><title>Blink</title><author>Malcolm Gladwell</author></book>
</library>
"""
response = self.client.post(
"/raw_post_view/", test_doc, content_type="text/xml"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Book template")
self.assertEqual(response.content, b"Blink - Malcolm Gladwell")
def test_insecure(self):
"GET a URL through http"
response = self.client.get("/secure_view/", secure=False)
self.assertFalse(response.test_was_secure_request)
self.assertEqual(response.test_server_port, "80")
def test_secure(self):
"GET a URL through https"
response = self.client.get("/secure_view/", secure=True)
self.assertTrue(response.test_was_secure_request)
self.assertEqual(response.test_server_port, "443")
def test_redirect(self):
"GET a URL that redirects elsewhere"
response = self.client.get("/redirect_view/")
self.assertRedirects(response, "/get_view/")
def test_redirect_with_query(self):
"GET a URL that redirects with given GET parameters"
response = self.client.get("/redirect_view/", {"var": "value"})
self.assertRedirects(response, "/get_view/?var=value")
def test_redirect_with_query_ordering(self):
"""assertRedirects() ignores the order of query string parameters."""
response = self.client.get("/redirect_view/", {"var": "value", "foo": "bar"})
self.assertRedirects(response, "/get_view/?var=value&foo=bar")
self.assertRedirects(response, "/get_view/?foo=bar&var=value")
def test_permanent_redirect(self):
"GET a URL that redirects permanently elsewhere"
response = self.client.get("/permanent_redirect_view/")
self.assertRedirects(response, "/get_view/", status_code=301)
def test_temporary_redirect(self):
"GET a URL that does a non-permanent redirect"
response = self.client.get("/temporary_redirect_view/")
self.assertRedirects(response, "/get_view/", status_code=302)
def test_redirect_to_strange_location(self):
"GET a URL that redirects to a non-200 page"
response = self.client.get("/double_redirect_view/")
# The response was a 302, and that the attempt to get the redirection
# location returned 301 when retrieved
self.assertRedirects(
response, "/permanent_redirect_view/", target_status_code=301
)
def test_follow_redirect(self):
"A URL that redirects can be followed to termination."
response = self.client.get("/double_redirect_view/", follow=True)
self.assertRedirects(
response, "/get_view/", status_code=302, target_status_code=200
)
self.assertEqual(len(response.redirect_chain), 2)
def test_follow_relative_redirect(self):
"A URL with a relative redirect can be followed."
response = self.client.get("/accounts/", follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/accounts/login/")
def test_follow_relative_redirect_no_trailing_slash(self):
"A URL with a relative redirect with no trailing slash can be followed."
response = self.client.get("/accounts/no_trailing_slash", follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/accounts/login/")
def test_redirect_to_querystring_only(self):
"""A URL that consists of a querystring only can be followed"""
response = self.client.post("/post_then_get_view/", follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/post_then_get_view/")
self.assertEqual(response.content, b"The value of success is true.")
def test_follow_307_and_308_redirect(self):
"""
A 307 or 308 redirect preserves the request method after the redirect.
"""
methods = ("get", "post", "head", "options", "put", "patch", "delete", "trace")
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_view_%s/" % code, data={"value": "test"}, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/post_view/")
self.assertEqual(response.request["REQUEST_METHOD"], method.upper())
def test_follow_307_and_308_preserves_query_string(self):
methods = ("post", "options", "put", "patch", "delete", "trace")
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_view_%s_query_string/" % code,
data={"value": "test"},
follow=True,
)
self.assertRedirects(
response, "/post_view/?hello=world", status_code=code
)
self.assertEqual(response.request["QUERY_STRING"], "hello=world")
def test_follow_307_and_308_get_head_query_string(self):
methods = ("get", "head")
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_view_%s_query_string/" % code,
data={"value": "test"},
follow=True,
)
self.assertRedirects(
response, "/post_view/?hello=world", status_code=code
)
self.assertEqual(response.request["QUERY_STRING"], "value=test")
def test_follow_307_and_308_preserves_post_data(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.post(
"/redirect_view_%s/" % code, data={"value": "test"}, follow=True
)
self.assertContains(response, "test is the value")
def test_follow_307_and_308_preserves_put_body(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.put(
"/redirect_view_%s/?to=/put_view/" % code, data="a=b", follow=True
)
self.assertContains(response, "a=b is the body")
def test_follow_307_and_308_preserves_get_params(self):
data = {"var": 30, "to": "/get_view/"}
for code in (307, 308):
with self.subTest(code=code):
response = self.client.get(
"/redirect_view_%s/" % code, data=data, follow=True
)
self.assertContains(response, "30 is the value")
def test_redirect_http(self):
"""GET a URL that redirects to an HTTP URI."""
response = self.client.get("/http_redirect_view/", follow=True)
self.assertFalse(response.test_was_secure_request)
def test_redirect_https(self):
"""GET a URL that redirects to an HTTPS URI."""
response = self.client.get("/https_redirect_view/", follow=True)
self.assertTrue(response.test_was_secure_request)
def test_notfound_response(self):
"GET a URL that responds as '404:Not Found'"
response = self.client.get("/bad_view/")
self.assertContains(response, "MAGIC", status_code=404)
def test_valid_form(self):
"POST valid data to a form"
post_data = {
"text": "Hello World",
"email": "[email protected]",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view/", post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Valid POST Template")
def test_valid_form_with_hints(self):
"GET a form, providing hints in the GET data"
hints = {"text": "Hello World", "multi": ("b", "c", "e")}
response = self.client.get("/form_view/", data=hints)
# The multi-value data has been rolled out ok
self.assertContains(response, "Select a valid choice.", 0)
self.assertTemplateUsed(response, "Form GET Template")
def test_incomplete_data_form(self):
"POST incomplete data to a form"
post_data = {"text": "Hello World", "value": 37}
response = self.client.post("/form_view/", post_data)
self.assertContains(response, "This field is required.", 3)
self.assertTemplateUsed(response, "Invalid POST Template")
form = response.context["form"]
self.assertFormError(form, "email", "This field is required.")
self.assertFormError(form, "single", "This field is required.")
self.assertFormError(form, "multi", "This field is required.")
def test_form_error(self):
"POST erroneous data to a form"
post_data = {
"text": "Hello World",
"email": "not an email address",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view/", post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(
response.context["form"], "email", "Enter a valid email address."
)
def test_valid_form_with_template(self):
"POST valid data to a form using multiple templates"
post_data = {
"text": "Hello World",
"email": "[email protected]",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data OK")
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, "base.html")
self.assertTemplateNotUsed(response, "Valid POST Template")
def test_incomplete_data_form_with_template(self):
"POST incomplete data to a form using multiple templates"
post_data = {"text": "Hello World", "value": 37}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data has errors")
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, "base.html")
self.assertTemplateNotUsed(response, "Invalid POST Template")
form = response.context["form"]
self.assertFormError(form, "email", "This field is required.")
self.assertFormError(form, "single", "This field is required.")
self.assertFormError(form, "multi", "This field is required.")
def test_form_error_with_template(self):
"POST erroneous data to a form using multiple templates"
post_data = {
"text": "Hello World",
"email": "not an email address",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data has errors")
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, "base.html")
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(
response.context["form"], "email", "Enter a valid email address."
)
def test_unknown_page(self):
"GET an invalid URL"
response = self.client.get("/unknown_view/")
# The response was a 404
self.assertEqual(response.status_code, 404)
def test_url_parameters(self):
"Make sure that URL ;-parameters are not stripped."
response = self.client.get("/unknown_view/;some-parameter")
# The path in the response includes it (ignore that it's a 404)
self.assertEqual(response.request["PATH_INFO"], "/unknown_view/;some-parameter")
def test_view_with_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
@override_settings(
INSTALLED_APPS=["django.contrib.auth"],
SESSION_ENGINE="django.contrib.sessions.backends.file",
)
def test_view_with_login_when_sessions_app_is_not_installed(self):
self.test_view_with_login()
def test_view_with_force_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_method_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/login_protected_method_view/"
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Request a page that requires a login
response = self.client.get("/login_protected_method_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_method_force_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/login_protected_method_view/"
)
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_method_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertRedirects(
response,
"/accounts/login/?redirect_to=/login_protected_view_custom_redirect/",
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Request a page that requires a login
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_force_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertRedirects(
response,
"/accounts/login/?redirect_to=/login_protected_view_custom_redirect/",
)
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_bad_login(self):
"Request a page that is protected with @login, but use bad credentials"
login = self.client.login(username="otheruser", password="nopassword")
self.assertFalse(login)
def test_view_with_inactive_login(self):
"""
        An inactive user may log in if the authentication backend allows it.
"""
credentials = {"username": "inactive", "password": "password"}
self.assertFalse(self.client.login(**credentials))
with self.settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.AllowAllUsersModelBackend"
]
):
self.assertTrue(self.client.login(**credentials))
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"django.contrib.auth.backends.AllowAllUsersModelBackend",
]
)
def test_view_with_inactive_force_login(self):
"Request a page that is protected with @login, but use an inactive login"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
self.client.force_login(
self.u2, backend="django.contrib.auth.backends.AllowAllUsersModelBackend"
)
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "inactive")
def test_logout(self):
"Request a logout after logging in"
# Log in
self.client.login(username="testclient", password="password")
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
def test_logout_with_force_login(self):
"Request a logout after logging in"
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"test_client.auth_backends.TestClientBackend",
],
)
def test_force_login_with_backend(self):
"""
Request a page that is protected with @login_required when using
force_login() and passing a backend.
"""
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
self.client.force_login(
self.u1, backend="test_client.auth_backends.TestClientBackend"
)
self.assertEqual(self.u1.backend, "test_client.auth_backends.TestClientBackend")
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"test_client.auth_backends.TestClientBackend",
],
)
def test_force_login_without_backend(self):
"""
force_login() without passing a backend and with multiple backends
configured should automatically use the first backend.
"""
self.client.force_login(self.u1)
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
self.assertEqual(self.u1.backend, "django.contrib.auth.backends.ModelBackend")
@override_settings(
AUTHENTICATION_BACKENDS=[
"test_client.auth_backends.BackendWithoutGetUserMethod",
"django.contrib.auth.backends.ModelBackend",
]
)
def test_force_login_with_backend_missing_get_user(self):
"""
force_login() skips auth backends without a get_user() method.
"""
self.client.force_login(self.u1)
self.assertEqual(self.u1.backend, "django.contrib.auth.backends.ModelBackend")
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies")
def test_logout_cookie_sessions(self):
self.test_logout()
def test_view_with_permissions(self):
"Request a page that is protected with @permission_required"
# Get the page without logging in. Should result in 302.
response = self.client.get("/permission_protected_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_view/"
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Log in with wrong permissions. Should result in 302.
response = self.client.get("/permission_protected_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_view/"
)
# TODO: Log in with right permissions and request the page again
def test_view_with_permissions_exception(self):
"""
Request a page that is protected with @permission_required but raises
an exception.
"""
# Get the page without logging in. Should result in 403.
response = self.client.get("/permission_protected_view_exception/")
self.assertEqual(response.status_code, 403)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Log in with wrong permissions. Should result in 403.
response = self.client.get("/permission_protected_view_exception/")
self.assertEqual(response.status_code, 403)
def test_view_with_method_permissions(self):
"Request a page that is protected with a @permission_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get("/permission_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_method_view/"
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Log in with wrong permissions. Should result in 302.
response = self.client.get("/permission_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_method_view/"
)
# TODO: Log in with right permissions and request the page again
def test_external_redirect(self):
response = self.client.get("/django_project_redirect/")
self.assertRedirects(
response, "https://www.djangoproject.com/", fetch_redirect_response=False
)
def test_external_redirect_without_trailing_slash(self):
"""
Client._handle_redirects() with an empty path.
"""
response = self.client.get("/no_trailing_slash_external_redirect/", follow=True)
self.assertRedirects(response, "https://testserver")
def test_external_redirect_with_fetch_error_msg(self):
"""
assertRedirects without fetch_redirect_response=False raises
a relevant ValueError rather than a non-descript AssertionError.
"""
response = self.client.get("/django_project_redirect/")
msg = (
"The test client is unable to fetch remote URLs (got "
"https://www.djangoproject.com/). If the host is served by Django, "
"add 'www.djangoproject.com' to ALLOWED_HOSTS. "
"Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
)
with self.assertRaisesMessage(ValueError, msg):
self.assertRedirects(response, "https://www.djangoproject.com/")
def test_session_modifying_view(self):
"Request a page that modifies the session"
# Session value isn't set initially
with self.assertRaises(KeyError):
self.client.session["tobacconist"]
self.client.post("/session_view/")
# The session was modified
self.assertEqual(self.client.session["tobacconist"], "hovercraft")
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE="django.contrib.sessions.backends.file",
)
def test_sessions_app_is_not_installed(self):
self.test_session_modifying_view()
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE="django.contrib.sessions.backends.nonexistent",
)
def test_session_engine_is_invalid(self):
with self.assertRaisesMessage(ImportError, "nonexistent"):
self.test_session_modifying_view()
def test_view_with_exception(self):
"Request a page that is known to throw an error"
with self.assertRaises(KeyError):
self.client.get("/broken_view/")
def test_exc_info(self):
client = Client(raise_request_exception=False)
response = client.get("/broken_view/")
self.assertEqual(response.status_code, 500)
exc_type, exc_value, exc_traceback = response.exc_info
self.assertIs(exc_type, KeyError)
self.assertIsInstance(exc_value, KeyError)
self.assertEqual(str(exc_value), "'Oops! Looks like you wrote some bad code.'")
self.assertIsNotNone(exc_traceback)
def test_exc_info_none(self):
response = self.client.get("/get_view/")
self.assertIsNone(response.exc_info)
def test_mail_sending(self):
"Mail is redirected to a dummy outbox during test setup"
response = self.client.get("/mail_sending_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test message")
self.assertEqual(mail.outbox[0].body, "This is a test email")
self.assertEqual(mail.outbox[0].from_email, "[email protected]")
self.assertEqual(mail.outbox[0].to[0], "[email protected]")
self.assertEqual(mail.outbox[0].to[1], "[email protected]")
def test_reverse_lazy_decodes(self):
"reverse_lazy() works in the test client"
data = {"var": "data"}
response = self.client.get(reverse_lazy("get_view"), data)
# Check some response details
self.assertContains(response, "This is a test")
def test_relative_redirect(self):
response = self.client.get("/accounts/")
self.assertRedirects(response, "/accounts/login/")
def test_relative_redirect_no_trailing_slash(self):
response = self.client.get("/accounts/no_trailing_slash")
self.assertRedirects(response, "/accounts/login/")
def test_mass_mail_sending(self):
"Mass mail is redirected to a dummy outbox during test setup"
response = self.client.get("/mass_mail_sending_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "First Test message")
self.assertEqual(mail.outbox[0].body, "This is the first test email")
self.assertEqual(mail.outbox[0].from_email, "[email protected]")
self.assertEqual(mail.outbox[0].to[0], "[email protected]")
self.assertEqual(mail.outbox[0].to[1], "[email protected]")
self.assertEqual(mail.outbox[1].subject, "Second Test message")
self.assertEqual(mail.outbox[1].body, "This is the second test email")
self.assertEqual(mail.outbox[1].from_email, "[email protected]")
self.assertEqual(mail.outbox[1].to[0], "[email protected]")
self.assertEqual(mail.outbox[1].to[1], "[email protected]")
def test_exception_following_nested_client_request(self):
"""
A nested test client request shouldn't clobber exception signals from
the outer client request.
"""
with self.assertRaisesMessage(Exception, "exception message"):
self.client.get("/nesting_exception_view/")
def test_response_raises_multi_arg_exception(self):
"""A request may raise an exception with more than one required arg."""
with self.assertRaises(TwoArgException) as cm:
self.client.get("/two_arg_exception/")
self.assertEqual(cm.exception.args, ("one", "two"))
def test_uploading_temp_file(self):
with tempfile.TemporaryFile() as test_file:
response = self.client.post("/upload_view/", data={"temp_file": test_file})
self.assertEqual(response.content, b"temp_file")
def test_uploading_named_temp_file(self):
with tempfile.NamedTemporaryFile() as test_file:
response = self.client.post(
"/upload_view/",
data={"named_temp_file": test_file},
)
self.assertEqual(response.content, b"named_temp_file")
@override_settings(
MIDDLEWARE=["django.middleware.csrf.CsrfViewMiddleware"],
ROOT_URLCONF="test_client.urls",
)
class CSRFEnabledClientTests(SimpleTestCase):
def test_csrf_enabled_client(self):
"A client can be instantiated with CSRF checks enabled"
csrf_client = Client(enforce_csrf_checks=True)
# The normal client allows the post
response = self.client.post("/post_view/", {})
self.assertEqual(response.status_code, 200)
# The CSRF-enabled client rejects it
response = csrf_client.post("/post_view/", {})
self.assertEqual(response.status_code, 403)
class CustomTestClient(Client):
i_am_customized = "Yes"
class CustomTestClientTest(SimpleTestCase):
client_class = CustomTestClient
def test_custom_test_client(self):
"""A test case can specify a custom class for self.client."""
self.assertIs(hasattr(self.client, "i_am_customized"), True)
def _generic_view(request):
return HttpResponse(status=200)
@override_settings(ROOT_URLCONF="test_client.urls")
class RequestFactoryTest(SimpleTestCase):
"""Tests for the request factory."""
# A mapping between names of HTTP/1.1 methods and their test views.
http_methods_and_views = (
("get", get_view),
("post", post_view),
("put", _generic_view),
("patch", _generic_view),
("delete", _generic_view),
("head", _generic_view),
("options", _generic_view),
("trace", trace_view),
)
request_factory = RequestFactory()
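    # Unlike Client, RequestFactory only builds request objects; no
    # middleware or URL resolution runs, and each view under test is
    # called directly with the constructed request.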
def test_request_factory(self):
"""The request factory implements all the HTTP/1.1 methods."""
for method_name, view in self.http_methods_and_views:
method = getattr(self.request_factory, method_name)
request = method("/somewhere/")
response = view(request)
self.assertEqual(response.status_code, 200)
def test_get_request_from_factory(self):
"""
        A GET request built by the request factory yields a templated
        response when passed to the view.
"""
request = self.request_factory.get("/somewhere/")
response = get_view(request)
self.assertContains(response, "This is a test")
def test_trace_request_from_factory(self):
"""The request factory returns an echo response for a TRACE request."""
url_path = "/somewhere/"
request = self.request_factory.trace(url_path)
response = trace_view(request)
protocol = request.META["SERVER_PROTOCOL"]
echoed_request_line = "TRACE {} {}".format(url_path, protocol)
self.assertContains(response, echoed_request_line)
def test_request_factory_default_headers(self):
request = RequestFactory(
headers={
"authorization": "Bearer faketoken",
"x-another-header": "some other value",
}
).get("/somewhere/")
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
request = RequestFactory(
headers={
"Authorization": "Bearer faketoken",
"X-Another-Header": "some other value",
}
).get("/somewhere/")
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
def test_request_factory_sets_headers(self):
for method_name, view in self.http_methods_and_views:
method = getattr(self.request_factory, method_name)
request = method(
"/somewhere/",
headers={
"authorization": "Bearer faketoken",
"x-another-header": "some other value",
},
)
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
request = method(
"/somewhere/",
headers={
"Authorization": "Bearer faketoken",
"X-Another-Header": "some other value",
},
)
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
@override_settings(ROOT_URLCONF="test_client.urls")
class AsyncClientTest(TestCase):
async def test_response_resolver_match(self):
response = await self.async_client.get("/async_get_view/")
self.assertTrue(hasattr(response, "resolver_match"))
self.assertEqual(response.resolver_match.url_name, "async_get_view")
@modify_settings(
MIDDLEWARE={"prepend": "test_client.tests.async_middleware_urlconf"},
)
async def test_response_resolver_match_middleware_urlconf(self):
response = await self.async_client.get("/middleware_urlconf_view/")
self.assertEqual(response.resolver_match.url_name, "middleware_urlconf_view")
async def test_follow_parameter_not_implemented(self):
msg = "AsyncClient request methods do not accept the follow parameter."
tests = (
"get",
"post",
"put",
"patch",
"delete",
"head",
"options",
"trace",
)
for method_name in tests:
with self.subTest(method=method_name):
method = getattr(self.async_client, method_name)
with self.assertRaisesMessage(NotImplementedError, msg):
await method("/redirect_view/", follow=True)
async def test_get_data(self):
response = await self.async_client.get("/get_view/", {"var": "val"})
self.assertContains(response, "This is a test. val is the value.")
async def test_post_data(self):
response = await self.async_client.post("/post_view/", {"value": 37})
self.assertContains(response, "Data received: 37 is the value.")
async def test_body_read_on_get_data(self):
response = await self.async_client.get("/post_view/")
self.assertContains(response, "Viewing GET page.")
@override_settings(ROOT_URLCONF="test_client.urls")
class AsyncRequestFactoryTest(SimpleTestCase):
request_factory = AsyncRequestFactory()
async def test_request_factory(self):
tests = (
"get",
"post",
"put",
"patch",
"delete",
"head",
"options",
"trace",
)
for method_name in tests:
with self.subTest(method=method_name):
async def async_generic_view(request):
if request.method.lower() != method_name:
return HttpResponseNotAllowed(method_name)
return HttpResponse(status=200)
method = getattr(self.request_factory, method_name)
request = method("/somewhere/")
response = await async_generic_view(request)
self.assertEqual(response.status_code, 200)
async def test_request_factory_data(self):
async def async_generic_view(request):
return HttpResponse(status=200, content=request.body)
request = self.request_factory.post(
"/somewhere/",
data={"example": "data"},
content_type="application/json",
)
self.assertEqual(request.headers["content-length"], "19")
self.assertEqual(request.headers["content-type"], "application/json")
response = await async_generic_view(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'{"example": "data"}')
async def test_request_limited_read(self):
tests = ["GET", "POST"]
for method in tests:
with self.subTest(method=method):
request = self.request_factory.generic(
method,
"/somewhere",
)
self.assertEqual(request.read(200), b"")
def test_request_factory_sets_headers(self):
request = self.request_factory.get(
"/somewhere/",
AUTHORIZATION="Bearer faketoken",
X_ANOTHER_HEADER="some other value",
)
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
request = self.request_factory.get(
"/somewhere/",
headers={
"Authorization": "Bearer faketoken",
"X-Another-Header": "some other value",
},
)
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
def test_request_factory_query_string(self):
request = self.request_factory.get("/somewhere/", {"example": "data"})
self.assertNotIn("Query-String", request.headers)
self.assertEqual(request.GET["example"], "data")
|
1208bb44dd927038fca6ca365db5e862daf957484af7185804805f092c0f58b2 | from django.core.exceptions import ImproperlyConfigured
from django.core.handlers.wsgi import WSGIHandler, WSGIRequest, get_script_name
from django.core.signals import request_finished, request_started
from django.db import close_old_connections, connection
from django.test import (
RequestFactory,
SimpleTestCase,
TransactionTestCase,
override_settings,
)
class HandlerTests(SimpleTestCase):
request_factory = RequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
def test_middleware_initialized(self):
handler = WSGIHandler()
self.assertIsNotNone(handler._middleware_chain)
def test_bad_path_info(self):
"""
        A non-UTF-8 path populates PATH_INFO with a URL-encoded path and
produces a 404.
"""
environ = self.request_factory.get("/").environ
environ["PATH_INFO"] = "\xed"
handler = WSGIHandler()
response = handler(environ, lambda *a, **k: None)
# The path of the request will be encoded to '/%ED'.
self.assertEqual(response.status_code, 404)
def test_non_ascii_query_string(self):
"""
Non-ASCII query strings are properly decoded (#20530, #22996).
"""
environ = self.request_factory.get("/").environ
raw_query_strings = [
b"want=caf%C3%A9", # This is the proper way to encode 'café'
b"want=caf\xc3\xa9", # UA forgot to quote bytes
b"want=caf%E9", # UA quoted, but not in UTF-8
# UA forgot to convert Latin-1 to UTF-8 and to quote (typical of
# MSIE).
b"want=caf\xe9",
]
got = []
for raw_query_string in raw_query_strings:
            # Simulate http.server.BaseHTTPRequestHandler.parse_request's
            # handling of the raw request.
environ["QUERY_STRING"] = str(raw_query_string, "iso-8859-1")
request = WSGIRequest(environ)
got.append(request.GET["want"])
# %E9 is converted to the Unicode replacement character by parse_qsl
self.assertEqual(got, ["café", "café", "caf\ufffd", "café"])
def test_non_ascii_cookie(self):
"""Non-ASCII cookies set in JavaScript are properly decoded (#20557)."""
environ = self.request_factory.get("/").environ
raw_cookie = 'want="café"'.encode("utf-8").decode("iso-8859-1")
environ["HTTP_COOKIE"] = raw_cookie
request = WSGIRequest(environ)
self.assertEqual(request.COOKIES["want"], "café")
def test_invalid_unicode_cookie(self):
"""
Invalid cookie content should result in an absent cookie, but not in a
crash while trying to decode it (#23638).
"""
environ = self.request_factory.get("/").environ
environ["HTTP_COOKIE"] = "x=W\x03c(h]\x8e"
request = WSGIRequest(environ)
        # The COOKIES contents aren't tested, as the result might differ
        # between Python versions because parsing of invalid content became
        # stricter in later versions.
self.assertIsInstance(request.COOKIES, dict)
@override_settings(ROOT_URLCONF="handlers.urls")
def test_invalid_multipart_boundary(self):
"""
Invalid boundary string should produce a "Bad Request" response, not a
server error (#23887).
"""
environ = self.request_factory.post("/malformed_post/").environ
environ["CONTENT_TYPE"] = "multipart/form-data; boundary=WRONG\x07"
handler = WSGIHandler()
response = handler(environ, lambda *a, **k: None)
# Expect "bad request" response
self.assertEqual(response.status_code, 400)
@override_settings(ROOT_URLCONF="handlers.urls", MIDDLEWARE=[])
class TransactionsPerRequestTests(TransactionTestCase):
available_apps = []
def test_no_transaction(self):
response = self.client.get("/in_transaction/")
self.assertContains(response, "False")
def test_auto_transaction(self):
old_atomic_requests = connection.settings_dict["ATOMIC_REQUESTS"]
try:
connection.settings_dict["ATOMIC_REQUESTS"] = True
response = self.client.get("/in_transaction/")
finally:
connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
self.assertContains(response, "True")
async def test_auto_transaction_async_view(self):
old_atomic_requests = connection.settings_dict["ATOMIC_REQUESTS"]
try:
connection.settings_dict["ATOMIC_REQUESTS"] = True
msg = "You cannot use ATOMIC_REQUESTS with async views."
with self.assertRaisesMessage(RuntimeError, msg):
await self.async_client.get("/async_regular/")
finally:
connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
def test_no_auto_transaction(self):
old_atomic_requests = connection.settings_dict["ATOMIC_REQUESTS"]
try:
connection.settings_dict["ATOMIC_REQUESTS"] = True
response = self.client.get("/not_in_transaction/")
finally:
connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
self.assertContains(response, "False")
try:
connection.settings_dict["ATOMIC_REQUESTS"] = True
response = self.client.get("/not_in_transaction_using_none/")
finally:
connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
self.assertContains(response, "False")
try:
connection.settings_dict["ATOMIC_REQUESTS"] = True
response = self.client.get("/not_in_transaction_using_text/")
finally:
connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
        # The non_atomic_requests decorator was given an invalid database
        # alias, so it has no effect and the view still runs in a transaction.
self.assertContains(response, "True")
@override_settings(ROOT_URLCONF="handlers.urls")
class SignalsTests(SimpleTestCase):
def setUp(self):
self.signals = []
self.signaled_environ = None
request_started.connect(self.register_started)
request_finished.connect(self.register_finished)
def tearDown(self):
request_started.disconnect(self.register_started)
request_finished.disconnect(self.register_finished)
def register_started(self, **kwargs):
self.signals.append("started")
self.signaled_environ = kwargs.get("environ")
def register_finished(self, **kwargs):
self.signals.append("finished")
def test_request_signals(self):
response = self.client.get("/regular/")
self.assertEqual(self.signals, ["started", "finished"])
self.assertEqual(response.content, b"regular content")
self.assertEqual(self.signaled_environ, response.wsgi_request.environ)
def test_request_signals_streaming_response(self):
response = self.client.get("/streaming/")
self.assertEqual(self.signals, ["started"])
self.assertEqual(b"".join(response.streaming_content), b"streaming content")
self.assertEqual(self.signals, ["started", "finished"])
def empty_middleware(get_response):
pass
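# For contrast with empty_middleware above, a valid middleware factory must
# return a callable; a minimal illustrative sketch (not referenced by any
# test):
def passthrough_middleware(get_response):
    def middleware(request):
        return get_response(request)

    return middleware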
@override_settings(ROOT_URLCONF="handlers.urls")
class HandlerRequestTests(SimpleTestCase):
request_factory = RequestFactory()
def test_async_view(self):
"""Calling an async view down the normal synchronous path."""
response = self.client.get("/async_regular/")
self.assertEqual(response.status_code, 200)
def test_suspiciousop_in_view_returns_400(self):
response = self.client.get("/suspicious/")
self.assertEqual(response.status_code, 400)
def test_bad_request_in_view_returns_400(self):
response = self.client.get("/bad_request/")
self.assertEqual(response.status_code, 400)
def test_invalid_urls(self):
response = self.client.get("~%A9helloworld")
self.assertEqual(response.status_code, 404)
self.assertEqual(response.context["request_path"], "/~%25A9helloworld")
response = self.client.get("d%aao%aaw%aan%aal%aao%aaa%aad%aa/")
self.assertEqual(
response.context["request_path"],
"/d%25AAo%25AAw%25AAn%25AAl%25AAo%25AAa%25AAd%25AA",
)
response = self.client.get("/%E2%99%E2%99%A5/")
self.assertEqual(response.context["request_path"], "/%25E2%2599%E2%99%A5/")
response = self.client.get("/%E2%98%8E%E2%A9%E2%99%A5/")
self.assertEqual(
response.context["request_path"], "/%E2%98%8E%25E2%25A9%E2%99%A5/"
)
def test_environ_path_info_type(self):
environ = self.request_factory.get("/%E2%A8%87%87%A5%E2%A8%A0").environ
self.assertIsInstance(environ["PATH_INFO"], str)
def test_handle_accepts_httpstatus_enum_value(self):
def start_response(status, headers):
start_response.status = status
environ = self.request_factory.get("/httpstatus_enum/").environ
WSGIHandler()(environ, start_response)
self.assertEqual(start_response.status, "200 OK")
@override_settings(MIDDLEWARE=["handlers.tests.empty_middleware"])
def test_middleware_returns_none(self):
msg = "Middleware factory handlers.tests.empty_middleware returned None."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/")
def test_no_response(self):
msg = (
"The view %s didn't return an HttpResponse object. It returned None "
"instead."
)
tests = (
("/no_response_fbv/", "handlers.views.no_response"),
("/no_response_cbv/", "handlers.views.NoResponse.__call__"),
)
for url, view in tests:
with self.subTest(url=url), self.assertRaisesMessage(
ValueError, msg % view
):
self.client.get(url)
class ScriptNameTests(SimpleTestCase):
def test_get_script_name(self):
# Regression test for #23173
# Test first without PATH_INFO
script_name = get_script_name({"SCRIPT_URL": "/foobar/"})
self.assertEqual(script_name, "/foobar/")
script_name = get_script_name({"SCRIPT_URL": "/foobar/", "PATH_INFO": "/"})
self.assertEqual(script_name, "/foobar")
def test_get_script_name_double_slashes(self):
"""
        WSGI squashes multiple successive slashes in PATH_INFO;
        get_script_name() should take that into account when forming
        SCRIPT_NAME (#17133).
"""
script_name = get_script_name(
{
"SCRIPT_URL": "/mst/milestones//accounts/login//help",
"PATH_INFO": "/milestones/accounts/login/help",
}
)
self.assertEqual(script_name, "/mst")
@override_settings(ROOT_URLCONF="handlers.urls")
class AsyncHandlerRequestTests(SimpleTestCase):
"""Async variants of the normal handler request tests."""
async def test_sync_view(self):
"""Calling a sync view down the asynchronous path."""
response = await self.async_client.get("/regular/")
self.assertEqual(response.status_code, 200)
async def test_async_view(self):
"""Calling an async view down the asynchronous path."""
response = await self.async_client.get("/async_regular/")
self.assertEqual(response.status_code, 200)
async def test_suspiciousop_in_view_returns_400(self):
response = await self.async_client.get("/suspicious/")
self.assertEqual(response.status_code, 400)
async def test_bad_request_in_view_returns_400(self):
response = await self.async_client.get("/bad_request/")
self.assertEqual(response.status_code, 400)
async def test_no_response(self):
msg = (
"The view handlers.views.no_response didn't return an "
"HttpResponse object. It returned None instead."
)
with self.assertRaisesMessage(ValueError, msg):
await self.async_client.get("/no_response_fbv/")
async def test_unawaited_response(self):
msg = (
"The view handlers.views.CoroutineClearingView.__call__ didn't"
" return an HttpResponse object. It returned an unawaited"
" coroutine instead. You may need to add an 'await'"
" into your view."
)
with self.assertRaisesMessage(ValueError, msg):
await self.async_client.get("/unawaited/")
|
62989636f48c83b01776c3c4bf70f46abafe15a23335da7ab28ed52f9ce9bd12 | from django.urls import path
from . import views
urlpatterns = [
path("regular/", views.regular),
path("async_regular/", views.async_regular),
path("no_response_fbv/", views.no_response),
path("no_response_cbv/", views.NoResponse()),
path("streaming/", views.streaming),
path("in_transaction/", views.in_transaction),
path("not_in_transaction/", views.not_in_transaction),
path("not_in_transaction_using_none/", views.not_in_transaction_using_none),
path("not_in_transaction_using_text/", views.not_in_transaction_using_text),
path("bad_request/", views.bad_request),
path("suspicious/", views.suspicious),
path("malformed_post/", views.malformed_post),
path("httpstatus_enum/", views.httpstatus_enum),
path("unawaited/", views.async_unawaited),
]
|
fe64ef1b17ec927c63d63230d73464defc9dc9b7f90b6e9b970e13a4a94a058a | import asyncio
from http import HTTPStatus
from django.core.exceptions import BadRequest, SuspiciousOperation
from django.db import connection, transaction
from django.http import HttpResponse, StreamingHttpResponse
from django.views.decorators.csrf import csrf_exempt
def regular(request):
return HttpResponse(b"regular content")
def no_response(request):
pass
class NoResponse:
def __call__(self, request):
pass
def streaming(request):
return StreamingHttpResponse([b"streaming", b" ", b"content"])
def in_transaction(request):
return HttpResponse(str(connection.in_atomic_block))
@transaction.non_atomic_requests
def not_in_transaction(request):
return HttpResponse(str(connection.in_atomic_block))
@transaction.non_atomic_requests(using=None)
def not_in_transaction_using_none(request):
return HttpResponse(str(connection.in_atomic_block))
@transaction.non_atomic_requests(using="incorrect")
def not_in_transaction_using_text(request):
return HttpResponse(str(connection.in_atomic_block))
def bad_request(request):
raise BadRequest()
def suspicious(request):
raise SuspiciousOperation("dubious")
@csrf_exempt
def malformed_post(request):
request.POST
return HttpResponse()
def httpstatus_enum(request):
return HttpResponse(status=HTTPStatus.OK)
async def async_regular(request):
return HttpResponse(b"regular content")
class CoroutineClearingView:
def __call__(self, request):
"""Return an unawaited coroutine (common error for async views)."""
# Store coroutine to suppress 'unawaited' warning message
self._unawaited_coroutine = asyncio.sleep(0)
return self._unawaited_coroutine
def __del__(self):
try:
self._unawaited_coroutine.close()
except AttributeError:
# View was never called.
pass
async_unawaited = CoroutineClearingView()
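# For contrast with CoroutineClearingView, a correct async callable awaits its
# coroutine before returning a response; an illustrative sketch (not routed in
# urls.py):
class AwaitedView:
    async def __call__(self, request):
        await asyncio.sleep(0)
        return HttpResponse(b"awaited content")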
|
b19dfd95cfa4e33e6dd16d44438faaccbdeaa261f456543e5d4c0bee377acf3c | """Tests related to django.db.backends that haven't been organized."""
import datetime
import threading
import unittest
import warnings
from unittest import mock
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS,
DatabaseError,
IntegrityError,
connection,
connections,
reset_queries,
transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper
from django.db.models.sql.constants import CURSOR
from django.test import (
TestCase,
TransactionTestCase,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import (
Article,
Object,
ObjectReference,
Person,
Post,
RawData,
Reporter,
ReporterProxy,
SchoolClass,
SQLKeywordsModel,
Square,
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ,
)
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
        Test the custom ``django_date_trunc`` method, in particular against
fields which clash with strings passed to it (e.g. 'year') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
years = SchoolClass.objects.dates("last_updated", "year")
self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
def test_django_date_extract(self):
"""
        Test the custom ``django_date_extract`` method, in particular against fields
which clash with strings passed to it (e.g. 'day') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
classes = SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_last_executed_query_without_previous_query(self):
"""
last_executed_query should not raise an exception even if no previous
query has been run.
"""
with connection.cursor() as cursor:
connection.ops.last_executed_query(cursor, "", ())
def test_debug_sql(self):
list(Reporter.objects.filter(first_name="test"))
sql = connection.queries[-1]["sql"].lower()
self.assertIn("select", sql)
self.assertIn(Reporter._meta.db_table, sql)
def test_query_encoding(self):
"""last_executed_query() returns a string."""
data = RawData.objects.filter(raw_data=b"\x00\x46 \xFE").extra(
select={"föö": 1}
)
sql, params = data.query.sql_with_params()
with data.query.get_compiler("default").execute_sql(CURSOR) as cursor:
last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
self.assertIsInstance(last_sql, str)
def test_last_executed_query(self):
        # last_executed_query() interpolates all parameters, so in most cases
        # it is not equal to QuerySet.query.
for qs in (
Article.objects.filter(pk=1),
Article.objects.filter(pk__in=(1, 2), reporter__pk=3),
Article.objects.filter(
pk=1,
reporter__pk=9,
).exclude(reporter__pk__in=[2, 1]),
Article.objects.filter(pk__in=list(range(20, 31))),
):
sql, params = qs.query.sql_with_params()
with qs.query.get_compiler(DEFAULT_DB_ALIAS).execute_sql(CURSOR) as cursor:
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
str(qs.query),
)
@skipUnlessDBFeature("supports_paramstyle_pyformat")
def test_last_executed_query_dict(self):
square_opts = Square._meta
sql = "INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)" % (
connection.introspection.identifier_converter(square_opts.db_table),
connection.ops.quote_name(square_opts.get_field("root").column),
connection.ops.quote_name(square_opts.get_field("square").column),
)
with connection.cursor() as cursor:
params = {"root": 2, "square": 4}
cursor.execute(sql, params)
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
sql % params,
)
@skipUnlessDBFeature("supports_paramstyle_pyformat")
def test_last_executed_query_dict_overlap_keys(self):
square_opts = Square._meta
sql = "INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(root2)s)" % (
connection.introspection.identifier_converter(square_opts.db_table),
connection.ops.quote_name(square_opts.get_field("root").column),
connection.ops.quote_name(square_opts.get_field("square").column),
)
with connection.cursor() as cursor:
params = {"root": 2, "root2": 4}
cursor.execute(sql, params)
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
sql % params,
)
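        # The overlapping names ("root" is a prefix of "root2") exercise the
        # case where naive textual substitution could corrupt the longer
        # placeholder while interpolating the shorter one.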
def test_last_executed_query_with_duplicate_params(self):
square_opts = Square._meta
table = connection.introspection.identifier_converter(square_opts.db_table)
id_column = connection.ops.quote_name(square_opts.get_field("id").column)
root_column = connection.ops.quote_name(square_opts.get_field("root").column)
sql = f"UPDATE {table} SET {root_column} = %s + %s WHERE {id_column} = %s"
with connection.cursor() as cursor:
params = [42, 42, 1]
cursor.execute(sql, params)
last_executed_query = connection.ops.last_executed_query(
cursor, sql, params
)
self.assertEqual(
last_executed_query,
f"UPDATE {table} SET {root_column} = 42 + 42 WHERE {id_column} = 1",
)
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"""
An executemany call with too many/not enough parameters will raise an
exception.
"""
with connection.cursor() as cursor:
query = "INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % (
connection.introspection.identifier_converter("backends_square"),
connection.ops.quote_name("root"),
connection.ops.quote_name("square"),
)
with self.assertRaises(Exception):
cursor.executemany(query, [(1, 2, 3)])
with self.assertRaises(Exception):
cursor.executemany(query, [(1,)])
class LongNameTest(TransactionTestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
    check that it does. Refs #8901.
"""
available_apps = ["backends"]
def test_sequence_name_length_limits_create(self):
"""Creation of model with long name and long pk name doesn't error."""
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""
An m2m save of a model with a long name and a long m2m field name
doesn't error (#8901).
"""
obj = (
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
)
rel_obj = Person.objects.create(first_name="Django", last_name="Reinhardt")
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""
Sequence resetting as part of a flush with model with long name and
long pk name doesn't error (#8901).
"""
        # A full flush is expensive for the whole test suite, so dig into the
        # internals to generate the likely offending SQL and run it manually.
# Some convenience aliases
VLM = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = (
VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
)
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
sql_list = connection.ops.sql_flush(no_style(), tables, reset_sequences=True)
connection.ops.execute_sql_flush(sql_list)
class SequenceResetTest(TestCase):
def test_generic_relation(self):
"Sequence names are correct when resetting generic relations (Ref #13941)"
# Create an object with a manually specified PK
Post.objects.create(id=10, name="1st post", text="hello world")
# Reset the sequences for the database
commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(
no_style(), [Post]
)
with connection.cursor() as cursor:
for sql in commands:
cursor.execute(sql)
# If we create a new object now, it should have a PK greater
# than the PK we specified manually.
obj = Post.objects.create(name="New post", text="goodbye world")
self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly roll back and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
available_apps = []
# Unfortunately with sqlite3 the in-memory test database cannot be closed,
# and so it cannot be re-opened during testing.
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_signal(self):
data = {}
def receiver(sender, connection, **kwargs):
data["connection"] = connection
connection_created.connect(receiver)
connection.close()
with connection.cursor():
pass
self.assertIs(data["connection"].connection, connection.connection)
connection_created.disconnect(receiver)
data.clear()
with connection.cursor():
pass
self.assertEqual(data, {})
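# A typical real-world use of connection_created is per-connection setup, such
# as enabling SQLite foreign key enforcement; an illustrative sketch (defined
# here for reference but deliberately never connected, so it cannot affect the
# tests):
def enable_sqlite_foreign_keys(sender, connection, **kwargs):
    if connection.vendor == "sqlite":
        with connection.cursor() as cursor:
            cursor.execute("PRAGMA foreign_keys = ON;")


# Usage would be: connection_created.connect(enable_sqlite_foreign_keys)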
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
bare_select_suffix = connection.features.bare_select_suffix
def test_paramless_no_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%s'" + self.bare_select_suffix)
self.assertEqual(cursor.fetchall()[0][0], "%s")
def test_parameter_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ("%d",))
self.assertEqual(cursor.fetchall()[0], ("%", "%d"))
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
class BackendTestCase(TransactionTestCase):
available_apps = ["backends"]
def create_squares_with_executemany(self, args):
self.create_squares(args, "format", True)
def create_squares(self, args, paramstyle, multiple):
opts = Square._meta
tbl = connection.introspection.identifier_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field("root").column)
f2 = connection.ops.quote_name(opts.get_field("square").column)
if paramstyle == "format":
query = "INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % (tbl, f1, f2)
elif paramstyle == "pyformat":
query = "INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)" % (
tbl,
f1,
f2,
)
else:
raise ValueError("unsupported paramstyle in test")
with connection.cursor() as cursor:
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
# Test cursor.executemany #4896
args = [(i, i**2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i**2)
def test_cursor_executemany_with_empty_params_list(self):
# Test executemany with params=[] does nothing #4765
args = []
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
# Test executemany accepts iterators #10320
args = ((i, i**2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 5)
args = ((i, i**2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 9)
@skipUnlessDBFeature("supports_paramstyle_pyformat")
def test_cursor_execute_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = {"root": 3, "square": 9}
self.create_squares(args, "pyformat", multiple=False)
self.assertEqual(Square.objects.count(), 1)
@skipUnlessDBFeature("supports_paramstyle_pyformat")
def test_cursor_executemany_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = [{"root": i, "square": i**2} for i in range(-5, 6)]
self.create_squares(args, "pyformat", multiple=True)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i**2)
@skipUnlessDBFeature("supports_paramstyle_pyformat")
def test_cursor_executemany_with_pyformat_iterator(self):
args = ({"root": i, "square": i**2} for i in range(-3, 2))
self.create_squares(args, "pyformat", multiple=True)
self.assertEqual(Square.objects.count(), 5)
args = ({"root": i, "square": i**2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, "pyformat", multiple=True)
self.assertEqual(Square.objects.count(), 9)
def test_unicode_fetches(self):
# fetchone, fetchmany, fetchall return strings as Unicode objects.
qn = connection.ops.quote_name
Person(first_name="John", last_name="Doe").save()
Person(first_name="Jane", last_name="Doe").save()
Person(first_name="Mary", last_name="Agnelline").save()
Person(first_name="Peter", last_name="Parker").save()
Person(first_name="Clark", last_name="Kent").save()
opts2 = Person._meta
f3, f4 = opts2.get_field("first_name"), opts2.get_field("last_name")
with connection.cursor() as cursor:
cursor.execute(
"SELECT %s, %s FROM %s ORDER BY %s"
% (
qn(f3.column),
qn(f4.column),
connection.introspection.identifier_converter(opts2.db_table),
qn(f3.column),
)
)
self.assertEqual(cursor.fetchone(), ("Clark", "Kent"))
self.assertEqual(
list(cursor.fetchmany(2)), [("Jane", "Doe"), ("John", "Doe")]
)
self.assertEqual(
list(cursor.fetchall()), [("Mary", "Agnelline"), ("Peter", "Parker")]
)
def test_unicode_password(self):
old_password = connection.settings_dict["PASSWORD"]
connection.settings_dict["PASSWORD"] = "françois"
try:
with connection.cursor():
pass
except DatabaseError:
            # As the password is probably wrong, a database exception is
            # expected.
pass
except Exception as e:
self.fail("Unexpected error raised with Unicode password: %s" % e)
finally:
connection.settings_dict["PASSWORD"] = old_password
def test_database_operations_helper_class(self):
# Ticket #13630
self.assertTrue(hasattr(connection, "ops"))
self.assertTrue(hasattr(connection.ops, "connection"))
self.assertEqual(connection, connection.ops.connection)
def test_database_operations_init(self):
"""
DatabaseOperations initialization doesn't query the database.
See #17656.
"""
with self.assertNumQueries(0):
connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
"""Creating an existing table returns a DatabaseError"""
query = "CREATE TABLE %s (id INTEGER);" % Article._meta.db_table
with connection.cursor() as cursor:
with self.assertRaises(DatabaseError):
cursor.execute(query)
def test_cursor_contextmanager(self):
"""
Cursors can be used as a context manager
"""
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
        # Both InterfaceError and ProgrammingError seem to be used when
        # accessing a closed cursor (psycopg raises InterfaceError; the rest
        # seem to use ProgrammingError).
with self.assertRaises(connection.features.closed_cursor_error_class):
# cursor should be closed, so no queries should be possible.
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
@unittest.skipUnless(
connection.vendor == "postgresql",
"Psycopg specific cursor.closed attribute needed",
)
def test_cursor_contextmanager_closing(self):
        # There isn't a generic way to test that cursors are closed, but
        # psycopg exposes a closed attribute, so run this test only on
        # psycopg.
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
self.assertTrue(cursor.closed)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_is_usable_after_database_disconnects(self):
"""
is_usable() doesn't crash when the database disconnects (#21553).
"""
# Open a connection to the database.
with connection.cursor():
pass
# Emulate a connection close by the database.
connection._close()
# Even then is_usable() should not raise an exception.
try:
self.assertFalse(connection.is_usable())
finally:
# Clean up the mess created by connection._close(). Since the
# connection is already closed, this crashes on some backends.
try:
connection.close()
except Exception:
pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
sql = "SELECT 1" + connection.features.bare_select_suffix
with connection.cursor() as cursor:
reset_queries()
cursor.execute(sql)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertEqual(list(connection.queries[0]), ["sql", "time"])
self.assertEqual(connection.queries[0]["sql"], sql)
reset_queries()
self.assertEqual(0, len(connection.queries))
sql = "INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % (
connection.introspection.identifier_converter("backends_square"),
connection.ops.quote_name("root"),
connection.ops.quote_name("square"),
)
with connection.cursor() as cursor:
cursor.executemany(sql, [(1, 1), (2, 4)])
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertEqual(list(connection.queries[0]), ["sql", "time"])
self.assertEqual(connection.queries[0]["sql"], "2 times: %s" % sql)
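        # executemany() is logged as a single entry prefixed with the number
        # of parameter sets, hence "2 times: <sql>".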
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature("test_db_allows_multiple_connections")
@override_settings(DEBUG=True)
def test_queries_limit(self):
"""
The backend doesn't store an unlimited number of queries (#12581).
"""
old_queries_limit = BaseDatabaseWrapper.queries_limit
BaseDatabaseWrapper.queries_limit = 3
new_connection = connection.copy()
# Initialize the connection and clear initialization statements.
with new_connection.cursor():
pass
new_connection.queries_log.clear()
try:
with new_connection.cursor() as cursor:
cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(2, len(new_connection.queries))
self.assertEqual(0, len(w))
with new_connection.cursor() as cursor:
cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
msg = (
"Limit for query logging exceeded, only the last 3 queries will be "
"returned."
)
with self.assertWarnsMessage(UserWarning, msg):
self.assertEqual(3, len(new_connection.queries))
finally:
BaseDatabaseWrapper.queries_limit = old_queries_limit
new_connection.close()
@mock.patch("django.db.backends.utils.logger")
@override_settings(DEBUG=True)
def test_queries_logger(self, mocked_logger):
sql = "SELECT 1" + connection.features.bare_select_suffix
with connection.cursor() as cursor:
cursor.execute(sql)
params, kwargs = mocked_logger.debug.call_args
self.assertIn("; alias=%s", params[0])
self.assertEqual(params[2], sql)
self.assertIsNone(params[3])
self.assertEqual(params[4], connection.alias)
self.assertEqual(
list(kwargs["extra"]),
["duration", "sql", "params", "alias"],
)
self.assertEqual(tuple(kwargs["extra"].values()), params[1:])
def test_queries_bare_where(self):
sql = f"SELECT 1{connection.features.bare_select_suffix} WHERE 1=1"
with connection.cursor() as cursor:
cursor.execute(sql)
self.assertEqual(cursor.fetchone(), (1,))
def test_timezone_none_use_tz_false(self):
connection.ensure_connection()
with self.settings(TIME_ZONE=None, USE_TZ=False):
connection.init_connection_state()
# These tests aren't conditional because it would require differentiating
# between MySQL+InnoDB and MySQL+MyISAM (something we currently can't do).
class FkConstraintsTests(TransactionTestCase):
available_apps = ["backends"]
def setUp(self):
# Create a Reporter.
self.r = Reporter.objects.create(first_name="John", last_name="Smith")
def test_integrity_checks_on_creation(self):
"""
Try to create a model instance that violates a FK constraint. If it
fails it should fail with IntegrityError.
"""
a1 = Article(
headline="This is a test",
pub_date=datetime.datetime(2005, 7, 27),
reporter_id=30,
)
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks, make sure
        # constraints are also enforced for proxy models. Refs #17519.
a2 = Article(
headline="This is another test",
reporter=self.r,
pub_date=datetime.datetime(2012, 8, 3),
reporter_proxy_id=30,
)
with self.assertRaises(IntegrityError):
a2.save()
def test_integrity_checks_on_update(self):
"""
Try to update a model instance introducing a FK constraint violation.
If it fails it should fail with IntegrityError.
"""
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a1 = Article.objects.get(headline="Test article")
a1.reporter_id = 30
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks, make sure
        # constraints are also enforced for proxy models. Refs #17519.
# Create another article
r_proxy = ReporterProxy.objects.get(pk=self.r.pk)
Article.objects.create(
headline="Another article",
pub_date=datetime.datetime(1988, 5, 15),
reporter=self.r,
reporter_proxy=r_proxy,
)
# Retrieve the second article from the DB
a2 = Article.objects.get(headline="Another article")
a2.reporter_proxy_id = 30
with self.assertRaises(IntegrityError):
a2.save()
def test_disable_constraint_checks_manually(self):
"""
        When constraint checks are disabled, it should be possible to write
        bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
connection.disable_constraint_checking()
a.save()
connection.enable_constraint_checking()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
"""
        When constraint checks are disabled (using the context manager), it
        should be possible to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
with connection.constraint_checks_disabled():
a.save()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_check_constraints(self):
"""
Constraint checks should raise an IntegrityError when bad data is in the DB.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
with connection.constraint_checks_disabled():
a.save()
try:
connection.check_constraints(table_names=[Article._meta.db_table])
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
transaction.set_rollback(True)
def test_check_constraints_sql_keywords(self):
with transaction.atomic():
obj = SQLKeywordsModel.objects.create(reporter=self.r)
obj.refresh_from_db()
obj.reporter_id = 30
with connection.constraint_checks_disabled():
obj.save()
try:
connection.check_constraints(table_names=["order"])
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
available_apps = ["backends"]
def test_default_connection_thread_local(self):
"""
The default connection (i.e. django.db.connection) is different for
each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
with connection.cursor():
pass
connections_dict[id(connection)] = connection
def runner():
# Passing django.db.connection between threads doesn't work while
# connections[DEFAULT_DB_ALIAS] does.
from django.db import connections
connection = connections[DEFAULT_DB_ALIAS]
# Allow thread sharing so the connection can be closed by the
# main thread.
connection.inc_thread_sharing()
with connection.cursor():
pass
connections_dict[id(connection)] = connection
try:
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
            # Each created connection got a different inner connection.
self.assertEqual(
len({conn.connection for conn in connections_dict.values()}), 3
)
finally:
# Finish by closing the connections opened by the other threads
# (the connection opened in the main thread will automatically be
# closed on teardown).
for conn in connections_dict.values():
if conn is not connection and conn.allow_thread_sharing:
conn.close()
conn.dec_thread_sharing()
def test_connections_thread_local(self):
"""
The connections are different for each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
for conn in connections.all():
connections_dict[id(conn)] = conn
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.inc_thread_sharing()
connections_dict[id(conn)] = conn
try:
num_new_threads = 2
for x in range(num_new_threads):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEqual(
len(connections_dict),
len(connections.all()) * (num_new_threads + 1),
)
finally:
# Finish by closing the connections opened by the other threads
# (the connection opened in the main thread will automatically be
# closed on teardown).
for conn in connections_dict.values():
if conn is not connection and conn.allow_thread_sharing:
conn.close()
conn.dec_thread_sharing()
def test_pass_connection_between_threads(self):
"""
A connection can be passed from one thread to the other (#17258).
"""
Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections["default"] = main_thread_connection
try:
Person.objects.get(first_name="John", last_name="Doe")
except Exception as e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections["default"]])
t.start()
t.join()
# Without touching thread sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
connections["default"].close()
# After calling inc_thread_sharing() on the connection.
connections["default"].inc_thread_sharing()
try:
exceptions = []
do_thread()
# All good
self.assertEqual(exceptions, [])
finally:
connections["default"].dec_thread_sharing()
def test_closing_non_shared_connections(self):
"""
A connection that is not explicitly shareable cannot be closed by
another thread (#17258).
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections["default"]])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
# Enable thread sharing
connections["default"].inc_thread_sharing()
try:
t2 = threading.Thread(target=runner2, args=[connections["default"]])
t2.start()
t2.join()
finally:
connections["default"].dec_thread_sharing()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
def test_thread_sharing_count(self):
self.assertIs(connection.allow_thread_sharing, False)
connection.inc_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.inc_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.dec_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.dec_thread_sharing()
self.assertIs(connection.allow_thread_sharing, False)
msg = "Cannot decrement the thread sharing count below zero."
with self.assertRaisesMessage(RuntimeError, msg):
connection.dec_thread_sharing()
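# Since inc_thread_sharing()/dec_thread_sharing() act as a counter, a small
# context-manager wrapper is a natural convenience; a minimal sketch
# (illustrative only, not used by the tests above):
from contextlib import contextmanager


@contextmanager
def thread_shared(conn):
    """Share the connection for the duration of the block, then release it."""
    conn.inc_thread_sharing()
    try:
        yield conn
    finally:
        conn.dec_thread_sharing()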
class MySQLPKZeroTests(TestCase):
"""
    Zero as the id for an AutoField should raise an exception on MySQL,
    because MySQL does not allow zero as an autoincrement primary key
    unless the NO_AUTO_VALUE_ON_ZERO SQL mode is enabled.
"""
@skipIfDBFeature("allows_auto_pk_0")
def test_zero_as_autoval(self):
with self.assertRaises(ValueError):
Square.objects.create(id=0, root=0, square=1)
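        # Background: MySQL treats an explicit 0 for an AUTO_INCREMENT column
        # as "generate the next value" unless the session enables the
        # NO_AUTO_VALUE_ON_ZERO SQL mode, so Django's MySQL backend rejects 0
        # with a ValueError before it reaches the database.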
class DBConstraintTestCase(TestCase):
def test_can_reference_existent(self):
obj = Object.objects.create()
ref = ObjectReference.objects.create(obj=obj)
self.assertEqual(ref.obj, obj)
ref = ObjectReference.objects.get(obj=obj)
self.assertEqual(ref.obj, obj)
def test_can_reference_non_existent(self):
self.assertFalse(Object.objects.filter(id=12345).exists())
ref = ObjectReference.objects.create(obj_id=12345)
ref_new = ObjectReference.objects.get(obj_id=12345)
self.assertEqual(ref, ref_new)
with self.assertRaises(Object.DoesNotExist):
ref.obj
def test_many_to_many(self):
obj = Object.objects.create()
obj.related_objects.create()
self.assertEqual(Object.objects.count(), 2)
self.assertEqual(obj.related_objects.count(), 1)
intermediary_model = Object._meta.get_field(
"related_objects"
).remote_field.through
intermediary_model.objects.create(from_object_id=obj.id, to_object_id=12345)
self.assertEqual(obj.related_objects.count(), 1)
self.assertEqual(intermediary_model.objects.count(), 2)
|
5abda27d79b5120d5aef82ed3891512135f5d0609f6e2980c0089a99574c9559 | import datetime
import itertools
import unittest
from copy import copy
from unittest import mock
from django.core.exceptions import FieldError
from django.core.management.color import no_style
from django.db import (
DatabaseError,
DataError,
IntegrityError,
OperationalError,
connection,
)
from django.db.models import (
CASCADE,
PROTECT,
AutoField,
BigAutoField,
BigIntegerField,
BinaryField,
BooleanField,
CharField,
CheckConstraint,
DateField,
DateTimeField,
DecimalField,
DurationField,
F,
FloatField,
ForeignKey,
ForeignObject,
Index,
IntegerField,
JSONField,
ManyToManyField,
Model,
OneToOneField,
OrderBy,
PositiveIntegerField,
Q,
SlugField,
SmallAutoField,
SmallIntegerField,
TextField,
TimeField,
UniqueConstraint,
UUIDField,
Value,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper
from django.db.models.indexes import IndexExpression
from django.db.transaction import TransactionManagementError, atomic
from django.test import (
TransactionTestCase,
ignore_warnings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext, isolate_apps, register_lookup
from django.utils.deprecation import RemovedInDjango51Warning
from .fields import CustomManyToManyField, InheritedManyToManyField, MediumBlobField
from .models import (
Author,
AuthorCharFieldWithIndex,
AuthorTextFieldWithIndex,
AuthorWithDefaultHeight,
AuthorWithEvenLongerName,
AuthorWithIndexedName,
AuthorWithUniqueName,
AuthorWithUniqueNameAndBirthday,
Book,
BookForeignObj,
BookWeak,
BookWithLongName,
BookWithO2O,
BookWithoutAuthor,
BookWithSlug,
IntegerPK,
Node,
Note,
NoteRename,
Tag,
TagM2MTest,
TagUniqueRename,
Thing,
UniqueTest,
new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests for the schema-alteration code.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [
Author,
AuthorCharFieldWithIndex,
AuthorTextFieldWithIndex,
AuthorWithDefaultHeight,
AuthorWithEvenLongerName,
Book,
BookWeak,
BookWithLongName,
BookWithO2O,
BookWithSlug,
IntegerPK,
Node,
Note,
Tag,
TagM2MTest,
TagUniqueRename,
Thing,
UniqueTest,
]
# Utility functions
def setUp(self):
# local_models should contain test dependent model classes that will be
# automatically removed from the app cache on test tear down.
self.local_models = []
# isolated_local_models contains models that are in test methods
# decorated with @isolate_apps.
self.isolated_local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if "schema" in new_apps.all_models:
for model in self.local_models:
for many_to_many in model._meta.many_to_many:
through = many_to_many.remote_field.through
if through and through._meta.auto_created:
del new_apps.all_models["schema"][through._meta.model_name]
del new_apps.all_models["schema"][model._meta.model_name]
if self.isolated_local_models:
with connection.schema_editor() as editor:
for model in self.isolated_local_models:
editor.delete_model(model)
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
if connection.features.ignores_table_name_case:
table_names = [table_name.lower() for table_name in table_names]
for model in itertools.chain(SchemaTests.models, self.local_models):
tbl = converter(model._meta.db_table)
if connection.features.ignores_table_name_case:
tbl = tbl.lower()
if tbl in table_names:
editor.delete_model(model)
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
return columns
def get_primary_key(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_primary_key_column(cursor, table)
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table
).values()
if c["index"] and len(c["columns"]) == 1
]
def get_uniques(self, table):
with connection.cursor() as cursor:
return [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table
).values()
if c["unique"] and len(c["columns"]) == 1
]
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def get_constraints_for_column(self, model, column_name):
constraints = self.get_constraints(model._meta.db_table)
constraints_for_column = []
for name, details in constraints.items():
if details["columns"] == [column_name]:
constraints_for_column.append(name)
return sorted(constraints_for_column)
def check_added_field_default(
self,
schema_editor,
model,
field,
field_name,
expected_default,
cast_function=None,
):
with connection.cursor() as cursor:
schema_editor.add_field(model, field)
cursor.execute(
"SELECT {} FROM {};".format(field_name, model._meta.db_table)
)
database_default = cursor.fetchall()[0][0]
if cast_function and type(database_default) != type(expected_default):
database_default = cast_function(database_default)
self.assertEqual(database_default, expected_default)
def get_constraints_count(self, table, column, fk_to):
"""
        Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
number of foreign keys, unique constraints, and indexes on
`table`.`column`. The `fk_to` argument is a 2-tuple specifying the
expected foreign key relationship's (table, column).
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
counts = {"fks": 0, "uniques": 0, "indexes": 0}
for c in constraints.values():
if c["columns"] == [column]:
if c["foreign_key"] == fk_to:
counts["fks"] += 1
if c["unique"]:
counts["uniques"] += 1
elif c["index"]:
counts["indexes"] += 1
return counts
def get_column_collation(self, table, column):
with connection.cursor() as cursor:
return next(
f.collation
for f in connection.introspection.get_table_description(cursor, table)
if f.name == column
)
def get_column_comment(self, table, column):
with connection.cursor() as cursor:
return next(
f.comment
for f in connection.introspection.get_table_description(cursor, table)
if f.name == column
)
def get_table_comment(self, table):
with connection.cursor() as cursor:
return next(
t.comment
for t in connection.introspection.get_table_list(cursor)
if t.name == table
)
def assert_column_comment_not_exists(self, table, column):
with connection.cursor() as cursor:
columns = connection.introspection.get_table_description(cursor, table)
self.assertFalse(any([c.name == column and c.comment for c in columns]))
def assertIndexOrder(self, table, index, order):
constraints = self.get_constraints(table)
self.assertIn(index, constraints)
index_orders = constraints[index]["orders"]
self.assertTrue(
all(val == expected for val, expected in zip(index_orders, order))
)
def assertForeignKeyExists(self, model, column, expected_fk_table, field="id"):
"""
Fail if the FK constraint on `model.Meta.db_table`.`column` to
`expected_fk_table`.id doesn't exist.
"""
if not connection.features.can_introspect_foreign_keys:
return
constraints = self.get_constraints(model._meta.db_table)
constraint_fk = None
for details in constraints.values():
if details["columns"] == [column] and details["foreign_key"]:
constraint_fk = details["foreign_key"]
break
self.assertEqual(constraint_fk, (expected_fk_table, field))
def assertForeignKeyNotExists(self, model, column, expected_fk_table):
if not connection.features.can_introspect_foreign_keys:
return
with self.assertRaises(AssertionError):
self.assertForeignKeyExists(model, column, expected_fk_table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
with connection.schema_editor() as editor:
# Create the table
editor.create_model(Author)
# The table is there
list(Author.objects.all())
# Clean up that table
editor.delete_model(Author)
# No deferred SQL should be left over.
self.assertEqual(editor.deferred_sql, [])
# The table is gone
with self.assertRaises(DatabaseError):
list(Author.objects.all())
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk(self):
"Creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertForeignKeyExists(Book, "author_id", "schema_tag")
@skipUnlessDBFeature("can_create_inline_fk")
def test_inline_fk(self):
# Create some tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.create_model(Note)
self.assertForeignKeyNotExists(Note, "book_id", "schema_book")
# Add a foreign key from one to the other.
with connection.schema_editor() as editor:
new_field = ForeignKey(Book, CASCADE)
new_field.set_attributes_from_name("book")
editor.add_field(Note, new_field)
self.assertForeignKeyExists(Note, "book_id", "schema_book")
# Creating a FK field with a constraint uses a single statement without
# a deferred ALTER TABLE.
self.assertFalse(
[
sql
for sql in (str(statement) for statement in editor.deferred_sql)
if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql
]
)
@skipUnlessDBFeature("can_create_inline_fk")
def test_add_inline_fk_update_data(self):
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key and update data in the same transaction.
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True)
new_field.set_attributes_from_name("new_parent_fk")
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk])
assertIndex = (
self.assertIn
if connection.features.indexes_foreign_keys
else self.assertNotIn
)
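        # Backends that automatically index FK columns should have the index;
        # others should not.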
assertIndex("new_parent_fk_id", self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature(
"can_create_inline_fk",
"allows_multiple_constraints_on_same_fields",
)
@isolate_apps("schema")
def test_add_inline_fk_index_update_data(self):
class Node(Model):
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key, update data, and an index in the same
# transaction.
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True)
new_field.set_attributes_from_name("new_parent_fk")
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
Node._meta.add_field(new_field)
editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk])
editor.add_index(
Node, Index(fields=["new_parent_fk"], name="new_parent_inline_fk_idx")
)
self.assertIn("new_parent_fk_id", self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature("supports_foreign_keys")
def test_char_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorCharFieldWithIndex)
# Change CharField to FK
old_field = AuthorCharFieldWithIndex._meta.get_field("char_field")
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name("char_field")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorCharFieldWithIndex, old_field, new_field, strict=True
)
self.assertForeignKeyExists(
AuthorCharFieldWithIndex, "char_field_id", "schema_author"
)
@skipUnlessDBFeature("supports_foreign_keys")
@skipUnlessDBFeature("supports_index_on_text_field")
def test_text_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorTextFieldWithIndex)
# Change TextField to FK
old_field = AuthorTextFieldWithIndex._meta.get_field("text_field")
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name("text_field")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorTextFieldWithIndex, old_field, new_field, strict=True
)
self.assertForeignKeyExists(
AuthorTextFieldWithIndex, "text_field_id", "schema_author"
)
@isolate_apps("schema")
def test_char_field_pk_to_auto_field(self):
class Foo(Model):
id = CharField(max_length=255, primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk_to_proxy(self):
"Creating a FK to a proxy model creates database constraints."
class AuthorProxy(Author):
class Meta:
app_label = "schema"
apps = new_apps
proxy = True
class AuthorRef(Model):
author = ForeignKey(AuthorProxy, on_delete=CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [AuthorProxy, AuthorRef]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorRef)
self.assertForeignKeyExists(AuthorRef, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_fk_db_constraint(self):
"The db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
# Alter to one with a constraint
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
self.assertForeignKeyExists(Author, "tag_id", "schema_tag")
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
@isolate_apps("schema")
def test_no_db_constraint_added_during_primary_key_change(self):
"""
When a primary key that's pointed to by a ForeignKey with
db_constraint=False is altered, a foreign key constraint isn't added.
"""
class Author(Model):
class Meta:
app_label = "schema"
class BookWeak(Model):
author = ForeignKey(Author, CASCADE, db_constraint=False)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWeak)
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
old_field = Author._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Author
new_field.set_attributes_from_name("id")
        # @isolate_apps() and inner models are needed to have the model
        # relations populated; otherwise this doesn't act as a regression
        # test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
self.assertForeignKeyNotExists(
new_field.remote_field.through, "tag_id", "schema_tag"
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
with CaptureQueriesContext(
connection
) as ctx, connection.schema_editor() as editor:
editor.add_field(Author, new_field)
drop_default_sql = editor.sql_alter_column_no_default % {
"column": editor.quote_name(new_field.name),
}
self.assertFalse(
any(drop_default_sql in query["sql"] for query in ctx.captured_queries)
)
# Table is not rebuilt.
self.assertIs(
any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries), False
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries), False
)
columns = self.column_classes(Author)
self.assertEqual(
columns["age"][0],
connection.features.introspected_field_types["IntegerField"],
)
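        # Index 6 of a DB-API cursor.description row is null_ok, so this
        # asserts the new column is nullable.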
self.assertTrue(columns["age"][1][6])
def test_add_field_remove_field(self):
"""
        Adding a field and removing it removes all deferred SQL referring to it.
"""
with connection.schema_editor() as editor:
# Create a table with a unique constraint on the slug field.
editor.create_model(Tag)
# Remove the slug column.
editor.remove_field(Tag, Tag._meta.get_field("slug"))
self.assertEqual(editor.deferred_sql, [])
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["surname"][0],
connection.features.introspected_field_types["CharField"],
)
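        # On backends that treat '' as NULL (e.g. Oracle), the column stays
        # nullable even though the field itself is NOT NULL.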
self.assertEqual(
columns["surname"][1][6],
connection.features.interprets_empty_strings_as_nulls,
)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
        # BooleanField values are stored as TINYINT(1) on MySQL.
field_type = columns["awesome"][0]
self.assertEqual(
field_type, connection.features.introspected_field_types["BooleanField"]
)
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
# Weird field that saves the count of items in its value
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
        # Add the field with a default that needs transforming (to its
        # length, in this case)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns["thing"]
self.assertEqual(
field_type, connection.features.introspected_field_types["IntegerField"]
)
# Make sure the values were transformed correctly
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_o2o_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Note)
new_field = OneToOneField(Note, CASCADE, null=True)
new_field.set_attributes_from_name("note")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertIn("note_id", columns)
self.assertTrue(columns["note_id"][1][6])
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
        # MySQL introspects binary columns as text, so the type will come
        # back as one of these two.
self.assertIn(columns["bits"][0], ("BinaryField", "TextField"))
def test_add_field_durationfield_with_default(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = DurationField(default=datetime.timedelta(minutes=10))
new_field.set_attributes_from_name("duration")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["duration"][0],
connection.features.introspected_field_types["DurationField"],
)
@unittest.skipUnless(connection.vendor == "mysql", "MySQL specific")
def test_add_binaryfield_mediumblob(self):
"""
Test adding a custom-sized binary field on MySQL (#24846).
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field with default
new_field = MediumBlobField(blank=True, default=b"123")
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# Introspection treats BLOBs as TextFields
self.assertEqual(columns["bits"][0], "TextField")
@isolate_apps("schema")
def test_add_auto_field(self):
class AddAutoFieldModel(Model):
name = CharField(max_length=255, primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(AddAutoFieldModel)
self.isolated_local_models = [AddAutoFieldModel]
old_field = AddAutoFieldModel._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
new_field.model = AddAutoFieldModel
with connection.schema_editor() as editor:
editor.alter_field(AddAutoFieldModel, old_field, new_field)
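        # With primary_key dropped from "name", the table can now take an
        # AutoField primary key.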
new_auto_field = AutoField(primary_key=True)
new_auto_field.set_attributes_from_name("id")
        new_auto_field.model = AddAutoFieldModel
with connection.schema_editor() as editor:
editor.add_field(AddAutoFieldModel, new_auto_field)
# Crashes on PostgreSQL when the GENERATED BY suffix is missing.
AddAutoFieldModel.objects.create(name="test")
def test_remove_field(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with CaptureQueriesContext(connection) as ctx:
editor.remove_field(Author, Author._meta.get_field("name"))
columns = self.column_classes(Author)
self.assertNotIn("name", columns)
if getattr(connection.features, "can_alter_table_drop_column", True):
# Table is not rebuilt.
self.assertIs(
any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
def test_remove_indexed_field(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorCharFieldWithIndex)
with connection.schema_editor() as editor:
editor.remove_field(
AuthorCharFieldWithIndex,
AuthorCharFieldWithIndex._meta.get_field("char_field"),
)
columns = self.column_classes(AuthorCharFieldWithIndex)
self.assertNotIn("char_field", columns)
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(
bool(columns["name"][1][6]),
bool(connection.features.interprets_empty_strings_as_nulls),
)
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns["name"][0], "TextField")
self.assertTrue(columns["name"][1][6])
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns["name"][0], "TextField")
self.assertEqual(
bool(columns["name"][1][6]),
bool(connection.features.interprets_empty_strings_as_nulls),
)
def test_alter_auto_field_to_integer_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to IntegerField
old_field = Author._meta.get_field("id")
new_field = IntegerField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Now that ID is an IntegerField, the database raises an error if it
# isn't provided.
if not connection.features.supports_unspecified_pk:
with self.assertRaises(DatabaseError):
Author.objects.create()
def test_alter_auto_field_to_char_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to CharField
old_field = Author._meta.get_field("id")
new_field = CharField(primary_key=True, max_length=50)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@isolate_apps("schema")
def test_alter_auto_field_quoted_db_column(self):
class Foo(Model):
id = AutoField(primary_key=True, db_column='"quoted_id"')
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.db_column = '"quoted_id"'
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_not_unique_field_to_primary_key(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change UUIDField to primary key.
old_field = Author._meta.get_field("uuid")
new_field = UUIDField(primary_key=True)
new_field.set_attributes_from_name("uuid")
new_field.model = Author
with connection.schema_editor() as editor:
editor.remove_field(Author, Author._meta.get_field("id"))
editor.alter_field(Author, old_field, new_field, strict=True)
# Redundant unique constraint is not added.
count = self.get_constraints_count(
Author._meta.db_table,
Author._meta.get_field("uuid").column,
None,
)
self.assertLessEqual(count["uniques"], 1)
@isolate_apps("schema")
def test_alter_primary_key_quoted_db_table(self):
class Foo(Model):
class Meta:
app_label = "schema"
db_table = '"foo"'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_text_field(self):
        # Regression for "BLOB/TEXT column 'info' can't have a default value"
        # on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_text_field_to_not_null_with_default_value(self):
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("address")
new_field = TextField(blank=True, default="", null=False)
new_field.set_attributes_from_name("address")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature("can_defer_constraint_checks", "can_rollback_ddl")
def test_alter_fk_checks_deferred_constraints(self):
"""
#25492 - Altering a foreign key's structure and data in the same
transaction.
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field("parent")
new_field = ForeignKey(Node, CASCADE)
new_field.set_attributes_from_name("parent")
parent = Node.objects.create()
with connection.schema_editor() as editor:
# Update the parent FK to create a deferred constraint check.
Node.objects.update(parent=parent)
editor.alter_field(Node, old_field, new_field, strict=True)
@isolate_apps("schema")
def test_alter_null_with_default_value_deferred_constraints(self):
class Publisher(Model):
class Meta:
app_label = "schema"
class Article(Model):
publisher = ForeignKey(Publisher, CASCADE)
title = CharField(max_length=50, null=True)
description = CharField(max_length=100, null=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Publisher)
editor.create_model(Article)
self.isolated_local_models = [Article, Publisher]
publisher = Publisher.objects.create()
Article.objects.create(publisher=publisher)
old_title = Article._meta.get_field("title")
new_title = CharField(max_length=50, null=False, default="")
new_title.set_attributes_from_name("title")
old_description = Article._meta.get_field("description")
new_description = CharField(max_length=100, null=False, default="")
new_description.set_attributes_from_name("description")
with connection.schema_editor() as editor:
editor.alter_field(Article, old_title, new_title, strict=True)
editor.alter_field(Article, old_description, new_description, strict=True)
def test_alter_text_field_to_date_field(self):
"""
#25002 - Test conversion of text field to date field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="1988-05-05")
old_field = Note._meta.get_field("info")
new_field = DateField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
def test_alter_text_field_to_datetime_field(self):
"""
#25002 - Test conversion of text field to datetime field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="1988-05-05 3:16:17.4567")
old_field = Note._meta.get_field("info")
new_field = DateTimeField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
def test_alter_text_field_to_time_field(self):
"""
#25002 - Test conversion of text field to time field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="3:16:17.4567")
old_field = Note._meta.get_field("info")
new_field = TimeField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
@skipUnlessDBFeature("interprets_empty_strings_as_nulls")
def test_alter_textual_field_not_null_to_null(self):
"""
Nullability for textual fields is preserved on databases that
interpret empty strings as NULLs.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
columns = self.column_classes(Author)
# Field is nullable.
self.assertTrue(columns["uuid"][1][6])
# Change to NOT NULL.
old_field = Author._meta.get_field("uuid")
new_field = SlugField(null=False, blank=True)
new_field.set_attributes_from_name("uuid")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
# Nullability is preserved.
self.assertTrue(columns["uuid"][1][6])
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug="aaa")
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug="bbb")
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns["height"][1][6])
# Create some test data
Author.objects.create(name="Not null author", height=12)
Author.objects.create(name="Null author")
# Verify null value
self.assertEqual(Author.objects.get(name="Not null author").height, 12)
self.assertIsNone(Author.objects.get(name="Null author").height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertFalse(columns["height"][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name="Not null author").height, 12)
self.assertEqual(Author.objects.get(name="Null author").height, 42)
def test_alter_charfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field("name")
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_char_field_decrease_length(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
Author.objects.create(name="x" * 255)
# Change max_length of CharField.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
msg = "value too long for type character varying(254)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_with_custom_db_type(self):
from django.contrib.postgres.fields import ArrayField
class Foo(Model):
field = ArrayField(CharField(max_length=255))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("field")
new_field = ArrayField(CharField(max_length=16))
new_field.set_attributes_from_name("field")
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_array_field_decrease_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(CharField(max_length=16))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=["x" * 16])
old_field = ArrayModel._meta.get_field("field")
new_field = ArrayField(CharField(max_length=15))
new_field.set_attributes_from_name("field")
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = "value too long for type character varying(15)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_array_field_decrease_nested_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(ArrayField(CharField(max_length=16)))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=[["x" * 16]])
old_field = ArrayModel._meta.get_field("field")
new_field = ArrayField(ArrayField(CharField(max_length=15)))
new_field.set_attributes_from_name("field")
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = "value too long for type character varying(15)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_db_collation_arrayfield(self):
from django.contrib.postgres.fields import ArrayField
ci_collation = "case_insensitive"
cs_collation = "en-x-icu"
def drop_collation():
with connection.cursor() as cursor:
cursor.execute(f"DROP COLLATION IF EXISTS {ci_collation}")
with connection.cursor() as cursor:
cursor.execute(
f"CREATE COLLATION IF NOT EXISTS {ci_collation} (provider = icu, "
f"locale = 'und-u-ks-level2', deterministic = false)"
)
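        # ks-level2 requests secondary (case-insensitive) comparison
        # strength, hence deterministic = false.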
self.addCleanup(drop_collation)
class ArrayModel(Model):
field = ArrayField(CharField(max_length=16, db_collation=ci_collation))
class Meta:
app_label = "schema"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
self.assertEqual(
self.get_column_collation(ArrayModel._meta.db_table, "field"),
ci_collation,
)
# Alter collation.
old_field = ArrayModel._meta.get_field("field")
new_field_cs = ArrayField(CharField(max_length=16, db_collation=cs_collation))
new_field_cs.set_attributes_from_name("field")
        new_field_cs.model = ArrayModel
with connection.schema_editor() as editor:
editor.alter_field(ArrayModel, old_field, new_field_cs, strict=True)
self.assertEqual(
self.get_column_collation(ArrayModel._meta.db_table, "field"),
cs_collation,
)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_unique_with_collation_charfield(self):
ci_collation = "case_insensitive"
def drop_collation():
with connection.cursor() as cursor:
cursor.execute(f"DROP COLLATION IF EXISTS {ci_collation}")
with connection.cursor() as cursor:
cursor.execute(
f"CREATE COLLATION IF NOT EXISTS {ci_collation} (provider = icu, "
f"locale = 'und-u-ks-level2', deterministic = false)"
)
self.addCleanup(drop_collation)
class CiCharModel(Model):
field = CharField(max_length=16, db_collation=ci_collation, unique=True)
class Meta:
app_label = "schema"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(CiCharModel)
self.isolated_local_models = [CiCharModel]
self.assertEqual(
self.get_column_collation(CiCharModel._meta.db_table, "field"),
ci_collation,
)
self.assertIn("field", self.get_uniques(CiCharModel._meta.db_table))
def test_alter_textfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field("info")
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns["height"][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorWithDefaultHeight, old_field, new_field, strict=True
)
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns["height"][1][6])
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_to_fk(self):
"""
#24447 - Tests adding a FK constraint for an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for details in constraints.values():
if details["foreign_key"]:
self.fail(
"Found an unexpected FK constraint to %s" % details["columns"]
)
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
self.assertForeignKeyExists(LocalBook, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
BookWithO2O.objects.all().delete()
self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is not unique anymore
Book.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
Book.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
Book.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
Book.objects.all().delete()
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(BookWithO2O)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is unique now
BookWithO2O.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
def test_alter_field_fk_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
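        # FK constraints can only be counted when the backend both enforces
        # and can introspect foreign keys.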
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index on ForeignKey is replaced with a unique constraint for
# OneToOneField.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
def test_autofield_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Note)
# Rename the field.
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("note_ptr")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Alter AutoField to OneToOneField.
new_field_o2o = OneToOneField(Note, CASCADE)
new_field_o2o.set_attributes_from_name("note_ptr")
new_field_o2o.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field_o2o, strict=True)
columns = self.column_classes(Author)
field_type, _ = columns["note_ptr_id"]
self.assertEqual(
field_type, connection.features.introspected_field_types["IntegerField"]
)
def test_alter_field_fk_keeps_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
old_field = Book._meta.get_field("author")
# on_delete changed from CASCADE.
new_field = ForeignKey(Author, PROTECT)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index remains.
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
def test_alter_field_o2o_to_fk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint on OneToOneField is replaced with an index for
# ForeignKey.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 0, "indexes": 1})
def test_alter_field_o2o_keeps_unique(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
old_field = BookWithO2O._meta.get_field("author")
# on_delete changed from CASCADE.
new_field = OneToOneField(Author, PROTECT)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint remains.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
@skipUnlessDBFeature("ignores_table_name_case")
def test_alter_db_table_case(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Alter the case of the table
old_table_name = Author._meta.db_table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, old_table_name, old_table_name.upper())
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
        # This will fail if DROP DEFAULT is inadvertently executed on this
        # field, which would drop the id sequence, at least on PostgreSQL.
Author.objects.create(name="Foo")
Author.objects.create(name="Bar")
def test_alter_autofield_pk_to_bigautofield_pk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name="Foo", pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(
no_style(), [Author]
)
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
self.assertIsNotNone(Author.objects.create(name="Bar"))
def test_alter_autofield_pk_to_smallautofield_pk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = SmallAutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name="Foo", pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(
no_style(), [Author]
)
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
self.assertIsNotNone(Author.objects.create(name="Bar"))
def test_alter_int_pk_to_autofield_pk(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
AutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field("i")
new_field = AutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
        # A model class representing the updated table.
class IntegerPKToAutoField(Model):
i = AutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
def test_alter_int_pk_to_bigautofield_pk(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
BigAutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field("i")
new_field = BigAutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
        # A model class representing the updated table.
class IntegerPKToBigAutoField(Model):
i = BigAutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToBigAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
@isolate_apps("schema")
def test_alter_smallint_pk_to_smallautofield_pk(self):
"""
        Should be able to alter a SmallIntegerField(primary_key=True) to
SmallAutoField(primary_key=True).
"""
class SmallIntegerPK(Model):
i = SmallIntegerField(primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(SmallIntegerPK)
self.isolated_local_models = [SmallIntegerPK]
old_field = SmallIntegerPK._meta.get_field("i")
new_field = SmallAutoField(primary_key=True)
new_field.model = SmallIntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_serial_auto_field_to_bigautofield(self):
class SerialAutoField(Model):
id = SmallAutoField(primary_key=True)
class Meta:
app_label = "schema"
table = SerialAutoField._meta.db_table
column = SerialAutoField._meta.get_field("id").column
with connection.cursor() as cursor:
cursor.execute(
f'CREATE TABLE "{table}" '
f'("{column}" smallserial NOT NULL PRIMARY KEY)'
)
try:
old_field = SerialAutoField._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = SerialAutoField
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(SerialAutoField, old_field, new_field, strict=True)
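            # PostgreSQL names serial-backed sequences <table>_<column>_seq.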
sequence_name = f"{table}_{column}_seq"
with connection.cursor() as cursor:
cursor.execute(
"SELECT data_type FROM pg_sequences WHERE sequencename = %s",
[sequence_name],
)
row = cursor.fetchone()
sequence_data_type = row[0] if row and row[0] else None
self.assertEqual(sequence_data_type, "bigint")
# Rename the column.
old_field = new_field
new_field = AutoField(primary_key=True)
new_field.model = SerialAutoField
new_field.set_attributes_from_name("renamed_id")
with connection.schema_editor() as editor:
editor.alter_field(SerialAutoField, old_field, new_field, strict=True)
with connection.cursor() as cursor:
cursor.execute(
"SELECT data_type FROM pg_sequences WHERE sequencename = %s",
[sequence_name],
)
row = cursor.fetchone()
sequence_data_type = row[0] if row and row[0] else None
self.assertEqual(sequence_data_type, "integer")
finally:
with connection.cursor() as cursor:
cursor.execute(f'DROP TABLE "{table}"')
def test_alter_int_pk_to_int_unique(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
IntegerField(unique=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
# Delete the old PK
old_field = IntegerPK._meta.get_field("i")
new_field = IntegerField(unique=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# The primary key constraint is gone. Result depends on database:
# 'id' for SQLite, None for others (must not be 'i').
self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ("id", None))
# Set up a model class as it currently stands. The original IntegerPK
# class is now out of date and some backends make use of the whole
# model class when modifying a field (such as sqlite3 when remaking a
# table) so an outdated model class leads to incorrect results.
class Transitional(Model):
i = IntegerField(unique=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = "INTEGERPK"
# model requires a new PK
old_field = Transitional._meta.get_field("j")
new_field = IntegerField(primary_key=True)
new_field.model = Transitional
new_field.set_attributes_from_name("j")
with connection.schema_editor() as editor:
editor.alter_field(Transitional, old_field, new_field, strict=True)
        # Create a model class representing the updated table.
class IntegerUnique(Model):
i = IntegerField(unique=True)
j = IntegerField(primary_key=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = "INTEGERPK"
# Ensure unique constraint works.
IntegerUnique.objects.create(i=1, j=1)
with self.assertRaises(IntegrityError):
IntegerUnique.objects.create(i=1, j=2)
def test_rename(self):
"""
        Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(
columns["display_name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertNotIn("name", columns)
@isolate_apps("schema")
def test_rename_referenced_field(self):
class Author(Model):
name = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE, to_field="name")
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
# Ensure the foreign key reference was updated.
self.assertForeignKeyExists(Book, "author_id", "schema_author", "renamed")
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_rename_keep_null_status(self):
"""
Renaming a field shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = TextField()
new_field.set_attributes_from_name("detail_info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns["detail_info"][0], "TextField")
self.assertNotIn("info", columns)
with self.assertRaises(IntegrityError):
NoteRename.objects.create(detail_info=None)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(
LocalBookWithM2M._meta.get_field("tags").remote_field.through
)
self.assertEqual(
columns["tagm2mtest_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass(
"TagM2MTest", related_name="books", through=LocalTagThrough
)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(
columns["book_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertEqual(
columns["tag_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def test_m2m_through_remove(self):
class LocalAuthorNoteThrough(Model):
book = ForeignKey("schema.Author", CASCADE)
tag = ForeignKey("self", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalNoteWithM2MThrough(Model):
authors = ManyToManyField("schema.Author", through=LocalAuthorNoteThrough)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorNoteThrough, LocalNoteWithM2MThrough]
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalAuthorNoteThrough)
editor.create_model(LocalNoteWithM2MThrough)
# Remove the through parameter.
old_field = LocalNoteWithM2MThrough._meta.get_field("authors")
new_field = ManyToManyField("Author")
new_field.set_attributes_from_name("authors")
msg = (
f"Cannot alter field {old_field} into {new_field} - they are not "
f"compatible types (you cannot alter to or from M2M fields, or add or "
f"remove through= on M2M fields)"
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.alter_field(LocalNoteWithM2MThrough, old_field, new_field)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Add the field
with CaptureQueriesContext(
connection
) as ctx, connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Table is not rebuilt.
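        # The single CREATE TABLE below is the auto-created through table.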
self.assertEqual(
len(
[
query["sql"]
for query in ctx.captured_queries
if "CREATE TABLE" in query["sql"]
]
),
1,
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.remote_field.through)
self.assertEqual(
columns["tagm2mtest_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Make sure the model state is coherent with the table one now that
# we've removed the tags field.
opts = LocalAuthorWithM2M._meta
opts.local_many_to_many.remove(new_field)
del new_apps.all_models["schema"][
new_field.remote_field.through._meta.model_name
]
opts._expire_cache()
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op)
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass(
"schema.TagM2MTest", related_name="authors", through=LocalAuthorTag
)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass(
"schema.TagM2MTest", related_name="authors", through=LocalAuthorTag
)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(
LocalAuthorWithM2MThrough, old_field, new_field, strict=True
)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
LocalBookWithM2M._meta.get_field("tags").remote_field.through,
"tagm2mtest_id",
"schema_tagm2mtest",
)
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
# Ensure old M2M is gone
with self.assertRaises(DatabaseError):
self.column_classes(
LocalBookWithM2M._meta.get_field("tags").remote_field.through
)
# This model looks like the new model and is used for teardown.
opts = LocalBookWithM2M._meta
opts.local_many_to_many.remove(old_field)
# Ensure the new M2M exists and points to UniqueTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
new_field.remote_field.through, "uniquetest_id", "schema_uniquetest"
)
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@isolate_apps("schema")
def test_m2m_rename_field_in_target_model(self):
class LocalTagM2MTest(Model):
title = CharField(max_length=255)
class Meta:
app_label = "schema"
class LocalM2M(Model):
tags = ManyToManyField(LocalTagM2MTest)
class Meta:
app_label = "schema"
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(LocalM2M)
editor.create_model(LocalTagM2MTest)
self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
# Ensure the m2m table is there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
# Alter a field in LocalTagM2MTest.
old_field = LocalTagM2MTest._meta.get_field("title")
new_field = CharField(max_length=254)
new_field.contribute_to_class(LocalTagM2MTest, "title1")
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
# Ensure the m2m table is still there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
constraints = self.get_constraints(Author._meta.db_table)
if not any(
details["columns"] == ["height"] and details["check"]
for details in constraints.values()
):
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
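        # set_attributes_from_name() fills in the name/column attributes that
        # contribute_to_class() would otherwise set.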
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for details in constraints.values():
if details["columns"] == ["height"] and details["check"]:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
if not any(
details["columns"] == ["height"] and details["check"]
for details in constraints.values()
):
self.fail("No check constraint for height found")
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
@isolate_apps("schema")
def test_check_constraint_timedelta_param(self):
class DurationModel(Model):
duration = DurationField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(DurationModel)
self.isolated_local_models = [DurationModel]
constraint_name = "duration_gte_5_minutes"
constraint = CheckConstraint(
check=Q(duration__gt=datetime.timedelta(minutes=5)),
name=constraint_name,
)
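        # How the timedelta renders in the CHECK clause depends on whether the
        # backend has a native interval type or stores durations as
        # microseconds.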
DurationModel._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(DurationModel, constraint)
constraints = self.get_constraints(DurationModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with self.assertRaises(IntegrityError), atomic():
DurationModel.objects.create(duration=datetime.timedelta(minutes=4))
DurationModel.objects.create(duration=datetime.timedelta(minutes=10))
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_remove_field_check_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the custom check constraint
constraint = CheckConstraint(
check=Q(height__gte=0), name="author_height_gte_0_check"
)
custom_constraint_name = constraint.name
Author._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field check
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field check
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the check constraint
with connection.schema_editor() as editor:
Author._meta.constraints = []
editor.remove_constraint(Author, constraint)
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
with self.assertRaises(IntegrityError):
TagUniqueRename.objects.create(title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_name_quoting(self):
old_table_name = TagUniqueRename._meta.db_table
try:
with connection.schema_editor() as editor:
editor.create_model(TagUniqueRename)
editor.alter_db_table(TagUniqueRename, old_table_name, "unique-table")
TagUniqueRename._meta.db_table = "unique-table"
# This fails if the unique index name isn't quoted.
editor.alter_unique_together(TagUniqueRename, [], (("title", "slug2"),))
finally:
with connection.schema_editor() as editor:
editor.delete_model(TagUniqueRename)
TagUniqueRename._meta.db_table = old_table_name
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys")
def test_unique_no_unnecessary_fk_drops(self):
"""
        If AlterField isn't selective about dropping foreign key constraints
        when modifying a field with a unique constraint, it incorrectly drops
        and recreates the Book.author foreign key even though that key doesn't
        restrict the field being changed (#29193).
"""
class Author(Model):
name = CharField(max_length=254, unique=True)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.model = Author
new_field.set_attributes_from_name("name")
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
with connection.schema_editor() as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
@isolate_apps("schema")
def test_unique_and_reverse_m2m(self):
"""
AlterField can modify a unique field when there's a reverse M2M
relation on the model.
"""
class Tag(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = "schema"
class Book(Model):
tags = ManyToManyField(Tag, related_name="books")
class Meta:
app_label = "schema"
self.isolated_local_models = [Book._meta.get_field("tags").remote_field.through]
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Book)
new_field = SlugField(max_length=75, unique=True)
new_field.model = Tag
new_field.set_attributes_from_name("slug")
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
with connection.schema_editor() as editor:
editor.alter_field(Tag, Tag._meta.get_field("slug"), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
# Ensure that the field is still unique.
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_field_unique_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueName)
self.local_models = [AuthorWithUniqueName]
# Add the custom unique constraint
constraint = UniqueConstraint(fields=["name"], name="author_name_uniq")
custom_constraint_name = constraint.name
AuthorWithUniqueName._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueName, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field uniqueness
old_field = AuthorWithUniqueName._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field uniqueness
new_field2 = AuthorWithUniqueName._meta.get_field("name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueName._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueName, constraint)
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest, UniqueTest._meta.unique_together, []
)
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest, [], UniqueTest._meta.unique_together
)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there's no unique_together to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [["author", "title"]], [])
def test_unique_together_with_fk_with_existing_index(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key, where the foreign key is added after the model is
created.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithoutAuthor)
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
editor.add_field(BookWithoutAuthor, new_field)
        # Ensure there's no unique_together to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [["author", "title"]], [])
def _test_composed_index_with_fk(self, index):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
table = Book._meta.db_table
self.assertEqual(Book._meta.indexes, [])
Book._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(Book, index)
self.assertIn(index.name, self.get_constraints(table))
Book._meta.indexes = []
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
def test_composed_index_with_fk(self):
index = Index(fields=["author", "title"], name="book_author_title_idx")
self._test_composed_index_with_fk(index)
def test_composed_desc_index_with_fk(self):
index = Index(fields=["-author", "title"], name="book_author_title_idx")
self._test_composed_index_with_fk(index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_composed_func_index_with_fk(self):
index = Index(F("author"), F("title"), name="book_author_title_idx")
self._test_composed_index_with_fk(index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_composed_desc_func_index_with_fk(self):
index = Index(F("author").desc(), F("title"), name="book_author_title_idx")
self._test_composed_index_with_fk(index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_composed_func_transform_index_with_fk(self):
index = Index(F("title__lower"), name="book_title_lower_idx")
with register_lookup(CharField, Lower):
self._test_composed_index_with_fk(index)
def _test_composed_constraint_with_fk(self, constraint):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
table = Book._meta.db_table
self.assertEqual(Book._meta.constraints, [])
Book._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(Book, constraint)
self.assertIn(constraint.name, self.get_constraints(table))
Book._meta.constraints = []
with connection.schema_editor() as editor:
editor.remove_constraint(Book, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
def test_composed_constraint_with_fk(self):
constraint = UniqueConstraint(
fields=["author", "title"],
name="book_author_title_uniq",
)
self._test_composed_constraint_with_fk(constraint)
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_composed_check_constraint_with_fk(self):
constraint = CheckConstraint(check=Q(author__gt=0), name="book_author_check")
self._test_composed_constraint_with_fk(constraint)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueNameAndBirthday)
self.local_models = [AuthorWithUniqueNameAndBirthday]
# Add the custom unique constraint
constraint = UniqueConstraint(
fields=["name", "birthday"], name="author_name_birthday_uniq"
)
custom_constraint_name = constraint.name
AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Remove unique together
unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together
with connection.schema_editor() as editor:
editor.alter_unique_together(
AuthorWithUniqueNameAndBirthday, unique_together, []
)
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add unique together
with connection.schema_editor() as editor:
editor.alter_unique_together(
AuthorWithUniqueNameAndBirthday, [], unique_together
)
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueNameAndBirthday._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint)
def test_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(fields=["name"], name="name_uq")
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
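            # create_sql() returns a Statement whose references_table() and
            # references_column() helpers report what the DDL touches.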
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIs(sql.references_table(table), True)
self.assertIs(sql.references_column(table, "name"), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Upper("name").desc(), name="func_upper_uq")
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC"])
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains a database function.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Upper("title"),
Lower("slug"),
name="func_upper_lower_unq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains database functions.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
sql = str(sql)
self.assertIn("UPPER(%s)" % editor.quote_name("title"), sql)
self.assertIn("LOWER(%s)" % editor.quote_name("slug"), sql)
self.assertLess(sql.index("UPPER"), sql.index("LOWER"))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_unique_constraint_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
F("height").desc(),
"uuid",
Lower("name").asc(),
name="func_f_lower_field_unq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC", "ASC", "ASC"])
constraints = self.get_constraints(table)
self.assertIs(constraints[constraint.name]["unique"], True)
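        # Expression members introspect with a None column name; "uuid" is the
        # only plain field in the constraint.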
self.assertEqual(len(constraints[constraint.name]["columns"]), 3)
self.assertEqual(constraints[constraint.name]["columns"][1], "uuid")
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, "height"), True)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "uuid"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_partial_indexes")
def test_func_unique_constraint_partial(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper("name"),
name="func_upper_cond_weight_uq",
condition=Q(weight__isnull=False),
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
self.assertIn(
"WHERE %s IS NOT NULL" % editor.quote_name("weight"),
str(sql),
)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_covering_indexes")
def test_func_unique_constraint_covering(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper("name"),
name="func_upper_covering_uq",
include=["weight", "height"],
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
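        # The indexed expression introspects as a None column; the INCLUDE
        # columns follow by name.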
self.assertEqual(
constraints[constraint.name]["columns"],
[None, "weight", "height"],
)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
self.assertIs(sql.references_column(table, "height"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
self.assertIn(
"INCLUDE (%s, %s)"
% (
editor.quote_name("weight"),
editor.quote_name("height"),
),
str(sql),
)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
constraint = UniqueConstraint(
F("name__lower"),
F("weight__abs"),
name="func_lower_abs_lookup_uq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains columns.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_collate(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Collate(F("title"), collation=collation).desc(),
Collate("slug", collation=collation),
name="func_collate_uq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC", "ASC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipIfDBFeature("supports_expression_indexes")
def test_func_unique_constraint_unsupported(self):
# UniqueConstraint is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(F("name"), name="func_name_uq")
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_constraint(Author, constraint))
self.assertIsNone(editor.remove_constraint(Author, constraint))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_nonexistent_field(self):
constraint = UniqueConstraint(Lower("nonexistent"), name="func_nonexistent_uq")
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Random(), name="func_random_uq")
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_constraint(Author, constraint)
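    # index_together is deprecated; the index_together tests below silence
    # RemovedInDjango51Warning until the API is removed in Django 5.1.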
@ignore_warnings(category=RemovedInDjango51Warning)
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
        # Ensure there's no index on the slug/title columns first
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
False,
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
True,
)
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
False,
)
@ignore_warnings(category=RemovedInDjango51Warning)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there's no index_together to begin with
self.assertEqual(Book._meta.index_together, ())
        # Add the index_together constraint
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [["author", "title"]], [])
@ignore_warnings(category=RemovedInDjango51Warning)
@isolate_apps("schema")
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
class TagIndexed(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = "schema"
index_together = [["slug", "title"]]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
self.isolated_local_models = [TagIndexed]
# Ensure there is an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c["columns"] == ["slug", "title"]
),
True,
)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
@ignore_warnings(category=RemovedInDjango51Warning)
@isolate_apps("schema")
def test_remove_index_together_does_not_remove_meta_indexes(self):
class AuthorWithIndexedNameAndBirthday(Model):
name = CharField(max_length=255)
birthday = DateField()
class Meta:
app_label = "schema"
index_together = [["name", "birthday"]]
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedNameAndBirthday)
self.isolated_local_models = [AuthorWithIndexedNameAndBirthday]
# Add the custom index
index = Index(fields=["name", "birthday"], name="author_name_birthday_idx")
custom_index_name = index.name
AuthorWithIndexedNameAndBirthday._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedNameAndBirthday, index)
# Ensure the indexes exist
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Remove index together
index_together = AuthorWithIndexedNameAndBirthday._meta.index_together
with connection.schema_editor() as editor:
editor.alter_index_together(
AuthorWithIndexedNameAndBirthday, index_together, []
)
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add index together
with connection.schema_editor() as editor:
editor.alter_index_together(
AuthorWithIndexedNameAndBirthday, [], index_together
)
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the index
with connection.schema_editor() as editor:
AuthorWithIndexedNameAndBirthday._meta.indexes = []
editor.remove_index(AuthorWithIndexedNameAndBirthday, index)
@isolate_apps("schema")
def test_db_table(self):
"""
Tests renaming of the table
"""
class Author(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
# Create the table and one referring it.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
# Alter the table
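        # Backends that can't rename a referenced table inside a transaction
        # (e.g. older SQLite) report supports_atomic_references_rename=False.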
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
# Ensure the foreign key reference was updated
self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor")
# Alter the table again
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
def test_add_remove_index(self):
"""
Tests index addition and removal
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there and has no index
self.assertNotIn("title", self.get_indexes(Author._meta.db_table))
# Add the index
index = Index(fields=["name"], name="author_title_idx")
with connection.schema_editor() as editor:
editor.add_index(Author, index)
self.assertIn("name", self.get_indexes(Author._meta.db_table))
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn("name", self.get_indexes(Author._meta.db_table))
def test_remove_db_index_doesnt_remove_custom_indexes(self):
"""
Changing db_index to False doesn't remove indexes from Meta.indexes.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedName)
self.local_models = [AuthorWithIndexedName]
# Ensure the table has its index
self.assertIn("name", self.get_indexes(AuthorWithIndexedName._meta.db_table))
# Add the custom index
index = Index(fields=["-name"], name="author_name_idx")
author_index_name = index.name
with connection.schema_editor() as editor:
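            # _create_index_name() reproduces the name Django autogenerates for
            # the implicit db_index index, so it can be asserted against below.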
db_index_name = editor._create_index_name(
table_name=AuthorWithIndexedName._meta.db_table,
column_names=("name",),
)
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedName, index)
old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertIn(author_index_name, old_constraints)
self.assertIn(db_index_name, old_constraints)
# Change name field to db_index=False
old_field = AuthorWithIndexedName._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorWithIndexedName, old_field, new_field, strict=True
)
new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertNotIn(db_index_name, new_constraints)
# The index from Meta.indexes is still in the database.
self.assertIn(author_index_name, new_constraints)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(AuthorWithIndexedName, index)
finally:
AuthorWithIndexedName._meta.indexes = []
def test_order_index(self):
"""
        Tests indexes with a per-column ordering (ASC/DESC)
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
# The table doesn't have an index
self.assertNotIn("title", self.get_indexes(Author._meta.db_table))
index_name = "author_name_idx"
# Add the index
index = Index(fields=["name", "-weight"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Author._meta.db_table, index_name, ["ASC", "DESC"])
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
        # Add a unique column and verify that it creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
        # Remove the unique constraint and check the implicit index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
def test_text_field_with_db_index(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorTextFieldWithIndex)
# The text_field index is present if the database supports it.
assertion = (
self.assertIn
if connection.features.supports_index_on_text_field
else self.assertNotIn
)
assertion(
"text_field", self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)
)
def _index_expressions_wrappers(self):
index_expression = IndexExpression()
index_expression.set_wrapper_classes(connection)
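        # The wrapper classes are the expression types (such as OrderBy and
        # Collate) that may appear only once, at the top of an indexed
        # expression.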
return ", ".join(
[
wrapper_cls.__qualname__
for wrapper_cls in index_expression.wrapper_classes
]
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_multiple_wrapper_references(self):
index = Index(OrderBy(F("name").desc(), descending=True), name="name")
msg = (
"Multiple references to %s can't be used in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_invalid_topmost_expressions(self):
index = Index(Upper(F("name").desc()), name="name")
msg = (
"%s must be topmost expressions in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower("name").desc(), name="func_lower_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC"])
# SQL contains a database function.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_f(self):
with connection.schema_editor() as editor:
editor.create_model(Tag)
index = Index("slug", F("title").desc(), name="func_f_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Tag, index)
sql = index.create_sql(Tag, editor)
table = Tag._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Tag._meta.db_table, index.name, ["ASC", "DESC"])
# SQL contains columns.
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIs(sql.references_column(table, "title"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Tag, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
index = Index(
F("name__lower"),
F("weight__abs"),
name="func_lower_abs_lookup_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower("name"), Upper("name"), name="func_lower_upper_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains database functions.
self.assertIs(sql.references_column(table, "name"), True)
sql = str(sql)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), sql)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), sql)
self.assertLess(sql.index("LOWER"), sql.index("UPPER"))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_index_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
index = Index(
F("author").desc(),
Lower("title").asc(),
"pub_date",
name="func_f_lower_field_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Book, index)
sql = index.create_sql(Book, editor)
table = Book._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC", "ASC", "ASC"])
self.assertEqual(len(constraints[index.name]["columns"]), 3)
self.assertEqual(constraints[index.name]["columns"][2], "pub_date")
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, "author_id"), True)
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "pub_date"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("title"), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
@isolate_apps("schema")
def test_func_index_f_decimalfield(self):
class Node(Model):
value = DecimalField(max_digits=5, decimal_places=2)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Node)
index = Index(F("value"), name="func_f_decimalfield_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Node, index)
sql = index.create_sql(Node, editor)
table = Node._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "value"), True)
# SQL doesn't contain casting.
self.assertNotIn("CAST", str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Node, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_cast(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Cast("weight", FloatField()), name="func_cast_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "weight"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_collate(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
index = Index(
Collate(F("title"), collation=collation).desc(),
Collate("slug", collation=collation),
name="func_collate_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(BookWithSlug, index)
sql = index.create_sql(BookWithSlug, editor)
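        # BookWithSlug shares Book's db_table, so introspecting through Book
        # below targets the same table.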
table = Book._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC", "ASC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
@skipIfDBFeature("collate_as_index_expression")
def test_func_index_collate_f_ordered(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(
Collate(F("name").desc(), collation=collation),
name="func_collate_f_desc_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_calc(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F("height") / (F("weight") + Value(5)), name="func_calc_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns and expressions.
self.assertIs(sql.references_column(table, "height"), True)
self.assertIs(sql.references_column(table, "weight"), True)
sql = str(sql)
self.assertIs(
sql.index(editor.quote_name("height"))
< sql.index("/")
< sql.index(editor.quote_name("weight"))
< sql.index("+")
< sql.index("5"),
True,
)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field")
@isolate_apps("schema")
def test_func_index_json_key_transform(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index("field__some_key", name="func_json_key_idx")
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "field"), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field")
@isolate_apps("schema")
def test_func_index_json_key_transform_cast(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
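        # KeyTextTransform extracts the key as text; the Cast turns the indexed
        # value into an integer.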
index = Index(
Cast(KeyTextTransform("some_key", "field"), IntegerField()),
name="func_json_key_cast_idx",
)
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "field"), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipIfDBFeature("supports_expression_indexes")
def test_func_index_unsupported(self):
# Index is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F("name"), name="random_idx")
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_index(Author, index))
self.assertIsNone(editor.remove_index(Author, index))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_nonexistent_field(self):
index = Index(Lower("nonexistent"), name="func_nonexistent_idx")
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Random(), name="func_random_idx")
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_index(Author, index)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertEqual(self.get_primary_key(Tag._meta.db_table), "id")
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
"id",
self.get_indexes(Tag._meta.db_table),
)
self.assertEqual(self.get_primary_key(Tag._meta.db_table), "slug")
def test_alter_primary_key_the_same_name(self):
with connection.schema_editor() as editor:
editor.create_model(Thing)
old_field = Thing._meta.get_field("when")
new_field = CharField(max_length=2, primary_key=True)
new_field.set_attributes_from_name("when")
new_field.model = Thing
with connection.schema_editor() as editor:
editor.alter_field(Thing, old_field, new_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
with connection.schema_editor() as editor:
editor.alter_field(Thing, new_field, old_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
def test_context_manager_exit(self):
"""
        Ensures the transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@skipIfDBFeature("can_rollback_ddl")
def test_unsupported_transactional_ddl_disallowed(self):
message = (
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
with atomic(), connection.schema_editor() as editor:
with self.assertRaisesMessage(TransactionManagementError, message):
editor.execute(
editor.sql_create_table % {"table": "foo", "definition": ""}
)
@skipUnlessDBFeature("supports_foreign_keys", "indexes_foreign_keys")
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
        Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name(
"author_foreign_key_with_really_long_field_name_id"
)
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
        Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
        # Add a second FK; this would fail due to the long ref name before the fix
new_field = ForeignKey(
AuthorWithEvenLongerName, CASCADE, related_name="something"
)
new_field.set_attributes_from_name(
"author_other_really_long_named_i_mean_so_long_fk"
)
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys")
def test_add_foreign_key_quoted_db_table(self):
class Author(Model):
class Meta:
db_table = '"table_author_double_quoted"'
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
self.isolated_local_models = [Author]
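        # MySQL quotes identifiers with backticks, so the double quotes stay
        # part of the table name there.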
if connection.vendor == "mysql":
self.assertForeignKeyExists(
Book, "author_id", '"table_author_double_quoted"'
)
else:
self.assertForeignKeyExists(Book, "author_id", "table_author_double_quoted")
def test_add_foreign_object(self):
with connection.schema_editor() as editor:
editor.create_model(BookForeignObj)
self.local_models = [BookForeignObj]
new_field = ForeignObject(
Author, on_delete=CASCADE, from_fields=["author_id"], to_fields=["id"]
)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.add_field(BookForeignObj, new_field)
def test_creation_deletion_reserved_names(self):
"""
        Tries creating a model's table, and then deleting it, when the table
        is named after an SQL reserved word.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail(
"Errors when applying initial migration for a model "
"with a table named after an SQL reserved word: %s" % e
)
# The table is there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# The table is gone
with self.assertRaises(DatabaseError):
list(Thing.objects.all())
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, field_class=IntegerField, **kwargs):
kwargs["db_column"] = "CamelCase"
field = field_class(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
identifier_converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = "CamelCaseIndex"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_index
% {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"using": "",
"columns": editor.quote_name(column),
"extra": "",
"condition": "",
"include": "",
}
)
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
constraint_name = "CamelCaseUniqConstraint"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(editor._create_unique_sql(model, [field], constraint_name))
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
if editor.sql_create_fk and connection.features.can_introspect_foreign_keys:
constraint_name = "CamelCaseFKConstraint"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_fk
% {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
}
)
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(
model,
get_field(Author, CASCADE, field_class=ForeignKey),
field,
strict=True,
)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name="Anonymous1")
        # Add a new CharField; its database default should come from
        # effective_default().
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(
item[0],
None if connection.features.interprets_empty_strings_as_nulls else "",
)
def test_add_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Add new CharField with a default
new_field = CharField(max_length=15, blank=True, default="surname default")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], "surname default")
# And that the default is no longer set in the database.
field = next(
f
for f in connection.introspection.get_table_description(
cursor, "schema_author"
)
if f.name == "surname"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_add_field_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable CharField with a default.
new_field = CharField(max_length=15, blank=True, null=True, default="surname")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name="Anonymous1")
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
"schema_author",
)
if f.name == "surname"
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ["NULL", None])
def test_add_textfield_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable TextField with a default.
new_field = TextField(blank=True, null=True, default="text")
new_field.set_attributes_from_name("description")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name="Anonymous1")
with connection.cursor() as cursor:
cursor.execute("SELECT description FROM schema_author;")
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
"schema_author",
)
if f.name == "description"
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ["NULL", None])
def test_alter_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name="Anonymous1")
self.assertIsNone(Author.objects.get().height)
old_field = Author._meta.get_field("height")
# The default from the new field is used in updating existing rows.
new_field = IntegerField(blank=True, default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(Author.objects.get().height, 42)
# The database default should be removed.
with connection.cursor() as cursor:
field = next(
f
for f in connection.introspection.get_table_description(
cursor, "schema_author"
)
if f.name == "height"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_alter_field_default_doesnt_perform_queries(self):
"""
        No queries are performed if a field default changes and the field
        isn't changing from null to non-null.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_default = old_field.default * 2
new_field = PositiveIntegerField(null=True, blank=True, default=new_default)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(
AuthorWithDefaultHeight, old_field, new_field, strict=True
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_fk_attributes_noop(self):
"""
No queries are performed when changing field attributes that don't
affect the schema.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_field = Book._meta.get_field("author")
new_field = ForeignKey(
Author,
blank=True,
editable=False,
error_messages={"invalid": "error message"},
help_text="help text",
limit_choices_to={"limit": "choice"},
on_delete=PROTECT,
related_name="related_name",
related_query_name="related_query_name",
validators=[lambda x: x],
verbose_name="verbose name",
)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, new_field, old_field, strict=True)
def test_alter_field_choices_noop(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(
choices=(("Jane", "Jane"), ("Joe", "Joe")),
max_length=255,
)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Author, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Author, new_field, old_field, strict=True)
def test_add_textfield_unhashable_default(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name="Anonymous1")
# Create a field that has an unhashable default
new_field = TextField(default={})
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_add_indexed_charfield(self):
field = CharField(max_length=255, db_index=True)
field.set_attributes_from_name("nom_de_plume")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
        # Should create two indexes; one of them supports LIKE lookups.
self.assertEqual(
self.get_constraints_for_column(Author, "nom_de_plume"),
[
"schema_author_nom_de_plume_7570a851",
"schema_author_nom_de_plume_7570a851_like",
],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_add_unique_charfield(self):
field = CharField(max_length=255, unique=True)
field.set_attributes_from_name("nom_de_plume")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
        # Should create two indexes; one of them supports LIKE lookups.
self.assertEqual(
self.get_constraints_for_column(Author, "nom_de_plume"),
[
"schema_author_nom_de_plume_7570a851_like",
"schema_author_nom_de_plume_key",
],
)
@skipUnlessDBFeature("supports_comments")
def test_add_db_comment_charfield(self):
comment = "Custom comment"
field = CharField(max_length=255, db_comment=comment)
field.set_attributes_from_name("name_with_comment")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name_with_comment"),
comment,
)
@skipUnlessDBFeature("supports_comments")
def test_add_db_comment_and_default_charfield(self):
comment = "Custom comment with default"
field = CharField(max_length=255, default="Joe Doe", db_comment=comment)
field.set_attributes_from_name("name_with_comment_default")
with connection.schema_editor() as editor:
editor.create_model(Author)
Author.objects.create(name="Before adding a new field")
editor.add_field(Author, field)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name_with_comment_default"),
comment,
)
with connection.cursor() as cursor:
cursor.execute(
f"SELECT name_with_comment_default FROM {Author._meta.db_table};"
)
for row in cursor.fetchall():
self.assertEqual(row[0], "Joe Doe")
@skipUnlessDBFeature("supports_comments")
def test_alter_db_comment(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add comment.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_comment="Custom comment")
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
"Custom comment",
)
# Alter comment.
old_field = new_field
new_field = CharField(max_length=255, db_comment="New custom comment")
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
"New custom comment",
)
# Remove comment.
old_field = new_field
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertIn(
self.get_column_comment(Author._meta.db_table, "name"),
[None, ""],
)
@skipUnlessDBFeature("supports_comments", "supports_foreign_keys")
def test_alter_db_comment_foreign_key(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
comment = "FK custom comment"
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, db_comment=comment)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Book._meta.db_table, "author_id"),
comment,
)
@skipUnlessDBFeature("supports_comments")
def test_alter_field_type_preserve_comment(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
comment = "This is the name."
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_comment=comment)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
comment,
)
# Changing a field type should preserve the comment.
old_field = new_field
new_field = CharField(max_length=511, db_comment=comment)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
# Comment is preserved.
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
comment,
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_comments")
def test_db_comment_table(self):
class ModelWithDbTableComment(Model):
class Meta:
app_label = "schema"
db_table_comment = "Custom table comment"
with connection.schema_editor() as editor:
editor.create_model(ModelWithDbTableComment)
self.isolated_local_models = [ModelWithDbTableComment]
self.assertEqual(
self.get_table_comment(ModelWithDbTableComment._meta.db_table),
"Custom table comment",
)
# Alter table comment.
old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment
with connection.schema_editor() as editor:
editor.alter_db_table_comment(
ModelWithDbTableComment, old_db_table_comment, "New table comment"
)
self.assertEqual(
self.get_table_comment(ModelWithDbTableComment._meta.db_table),
"New table comment",
)
# Remove table comment.
old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment
with connection.schema_editor() as editor:
editor.alter_db_table_comment(
ModelWithDbTableComment, old_db_table_comment, None
)
self.assertIn(
self.get_table_comment(ModelWithDbTableComment._meta.db_table),
[None, ""],
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_comments", "supports_foreign_keys")
def test_db_comments_from_abstract_model(self):
class AbstractModelWithDbComments(Model):
name = CharField(
max_length=255, db_comment="Custom comment", null=True, blank=True
)
class Meta:
app_label = "schema"
abstract = True
db_table_comment = "Custom table comment"
class ModelWithDbComments(AbstractModelWithDbComments):
pass
with connection.schema_editor() as editor:
editor.create_model(ModelWithDbComments)
self.isolated_local_models = [ModelWithDbComments]
self.assertEqual(
self.get_column_comment(ModelWithDbComments._meta.db_table, "name"),
"Custom comment",
)
self.assertEqual(
self.get_table_comment(ModelWithDbComments._meta.db_table),
"Custom table comment",
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_index_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_index=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "name"),
["schema_author_name_1fbc5617", "schema_author_name_1fbc5617_like"],
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
# Alter to add unique=True and create 2 indexes.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "name"),
["schema_author_name_1fbc5617_like", "schema_author_name_1fbc5617_uniq"],
)
# Remove unique=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_index_to_textfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Note)
self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Note._meta.get_field("info")
new_field = TextField(db_index=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Note, "info"),
["schema_note_info_4b0ea695", "schema_note_info_4b0ea695_like"],
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield_with_db_index(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to remove unique=True (should drop unique index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_remove_unique_and_db_index_from_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to remove both unique=True and db_index=True (should drop all indexes)
new_field2 = CharField(max_length=100)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"), []
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_swap_unique_and_db_index_with_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to set unique=True and remove db_index=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to set db_index=True and remove unique=True (should restore index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_db_index_to_charfield_with_unique(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Tag)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
# Alter to add db_index=True
old_field = Tag._meta.get_field("slug")
new_field = SlugField(db_index=True, unique=True)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
# Alter to remove db_index=True
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
def test_alter_field_add_index_to_integerfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])
# Alter to add db_index=True and create index.
old_field = Author._meta.get_field("weight")
new_field = IntegerField(null=True, db_index=True)
new_field.set_attributes_from_name("weight")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "weight"),
["schema_author_weight_587740f9"],
)
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])
def test_alter_pk_with_self_referential_field(self):
"""
Changing the primary key field name of a model with a self-referential
foreign key (#26384).
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field("node_id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Node, old_field, new_field, strict=True)
self.assertForeignKeyExists(Node, "parent_id", Node._meta.db_table)
@mock.patch("django.db.backends.base.schema.datetime")
@mock.patch("django.db.backends.base.schema.timezone")
def test_add_datefield_and_datetimefield_use_effective_default(
self, mocked_datetime, mocked_tz
):
"""
effective_default() should be used for DateField, DateTimeField, and
TimeField if auto_now or auto_now_add is set (#25005).
"""
now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
now_tz = datetime.datetime(
month=1, day=1, year=2000, hour=1, minute=1, tzinfo=datetime.timezone.utc
)
mocked_datetime.now = mock.MagicMock(return_value=now)
mocked_tz.now = mock.MagicMock(return_value=now_tz)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check auto_now/auto_now_add attributes are not defined
columns = self.column_classes(Author)
self.assertNotIn("dob_auto_now", columns)
self.assertNotIn("dob_auto_now_add", columns)
self.assertNotIn("dtob_auto_now", columns)
self.assertNotIn("dtob_auto_now_add", columns)
self.assertNotIn("tob_auto_now", columns)
self.assertNotIn("tob_auto_now_add", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Ensure fields were added with the correct defaults
dob_auto_now = DateField(auto_now=True)
dob_auto_now.set_attributes_from_name("dob_auto_now")
self.check_added_field_default(
editor,
Author,
dob_auto_now,
"dob_auto_now",
now.date(),
cast_function=lambda x: x.date(),
)
dob_auto_now_add = DateField(auto_now_add=True)
dob_auto_now_add.set_attributes_from_name("dob_auto_now_add")
self.check_added_field_default(
editor,
Author,
dob_auto_now_add,
"dob_auto_now_add",
now.date(),
cast_function=lambda x: x.date(),
)
dtob_auto_now = DateTimeField(auto_now=True)
dtob_auto_now.set_attributes_from_name("dtob_auto_now")
self.check_added_field_default(
editor,
Author,
dtob_auto_now,
"dtob_auto_now",
now,
)
dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True)
dt_tm_of_birth_auto_now_add.set_attributes_from_name("dtob_auto_now_add")
self.check_added_field_default(
editor,
Author,
dt_tm_of_birth_auto_now_add,
"dtob_auto_now_add",
now,
)
tob_auto_now = TimeField(auto_now=True)
tob_auto_now.set_attributes_from_name("tob_auto_now")
self.check_added_field_default(
editor,
Author,
tob_auto_now,
"tob_auto_now",
now.time(),
cast_function=lambda x: x.time(),
)
tob_auto_now_add = TimeField(auto_now_add=True)
tob_auto_now_add.set_attributes_from_name("tob_auto_now_add")
self.check_added_field_default(
editor,
Author,
tob_auto_now_add,
"tob_auto_now_add",
now.time(),
cast_function=lambda x: x.time(),
)
def test_namespaced_db_table_create_index_name(self):
"""
Table names are stripped of their namespace/schema before being used to
generate index names.
"""
with connection.schema_editor() as editor:
max_name_length = connection.ops.max_name_length() or 200
namespace = "n" * max_name_length
table_name = "t" * max_name_length
namespaced_table_name = '"%s"."%s"' % (namespace, table_name)
self.assertEqual(
editor._create_index_name(table_name, []),
editor._create_index_name(namespaced_table_name, []),
)
@unittest.skipUnless(
connection.vendor == "oracle", "Oracle specific db_table syntax"
)
def test_creation_with_db_table_double_quotes(self):
oracle_user = connection.creation._test_database_user()
class Student(Model):
name = CharField(max_length=30)
class Meta:
app_label = "schema"
apps = new_apps
db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
class Document(Model):
name = CharField(max_length=30)
students = ManyToManyField(Student)
class Meta:
app_label = "schema"
apps = new_apps
db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
self.isolated_local_models = [Student, Document]
with connection.schema_editor() as editor:
editor.create_model(Student)
editor.create_model(Document)
doc = Document.objects.create(name="Test Name")
student = Student.objects.create(name="Some man")
doc.students.add(student)
@isolate_apps("schema")
@unittest.skipUnless(
connection.vendor == "postgresql", "PostgreSQL specific db_table syntax."
)
def test_namespaced_db_table_foreign_key_reference(self):
with connection.cursor() as cursor:
cursor.execute("CREATE SCHEMA django_schema_tests")
def delete_schema():
with connection.cursor() as cursor:
cursor.execute("DROP SCHEMA django_schema_tests CASCADE")
self.addCleanup(delete_schema)
class Author(Model):
class Meta:
app_label = "schema"
class Book(Model):
class Meta:
app_label = "schema"
db_table = '"django_schema_tests"."schema_book"'
author = ForeignKey(Author, CASCADE)
author.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.add_field(Book, author)
def test_rename_table_renames_deferred_sql_references(self):
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.alter_db_table(Author, "schema_author", "schema_renamed_author")
editor.alter_db_table(Author, "schema_book", "schema_renamed_book")
try:
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_table("schema_author"), False)
self.assertIs(statement.references_table("schema_book"), False)
finally:
editor.alter_db_table(Author, "schema_renamed_author", "schema_author")
editor.alter_db_table(Author, "schema_renamed_book", "schema_book")
def test_rename_column_renames_deferred_sql_references(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_title = Book._meta.get_field("title")
new_title = CharField(max_length=100, db_index=True)
new_title.set_attributes_from_name("renamed_title")
editor.alter_field(Book, old_title, new_title)
old_author = Book._meta.get_field("author")
new_author = ForeignKey(Author, CASCADE)
new_author.set_attributes_from_name("renamed_author")
editor.alter_field(Book, old_author, new_author)
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_column("book", "title"), False)
self.assertIs(statement.references_column("book", "author_id"), False)
@isolate_apps("schema")
def test_referenced_field_without_constraint_rename_inside_atomic_block(self):
"""
Foreign keys without database level constraint don't prevent the field
they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(atomic=True) as editor:
editor.alter_field(Foo, Foo._meta.get_field("field"), new_field)
@isolate_apps("schema")
def test_referenced_table_without_constraint_rename_inside_atomic_block(self):
"""
Foreign keys without database level constraint don't prevent the table
they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(atomic=True) as editor:
editor.alter_db_table(Foo, Foo._meta.db_table, "renamed_table")
Foo._meta.db_table = "renamed_table"
@isolate_apps("schema")
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_db_collation_charfield(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
class Foo(Model):
field = CharField(max_length=255, db_collation=collation)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, "field"),
collation,
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_collation_on_textfield")
def test_db_collation_textfield(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
class Foo(Model):
field = TextField(db_collation=collation)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, "field"),
collation,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_add_field_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("alias")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["alias"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(columns["alias"][1][8], collation)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_field_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertIsNone(self.get_column_collation(Author._meta.db_table, "name"))
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_field_type_preserve_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
# Changing a field type should preserve the collation.
old_field = new_field
new_field = CharField(max_length=511, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
# Collation is preserved.
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_primary_key_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Thing)
old_field = Thing._meta.get_field("when")
new_field = CharField(max_length=1, db_collation=collation, primary_key=True)
new_field.set_attributes_from_name("when")
new_field.model = Thing
with connection.schema_editor() as editor:
editor.alter_field(Thing, old_field, new_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
self.assertEqual(
self.get_column_collation(Thing._meta.db_table, "when"),
collation,
)
with connection.schema_editor() as editor:
editor.alter_field(Thing, new_field, old_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
self.assertIsNone(self.get_column_collation(Thing._meta.db_table, "when"))
@skipUnlessDBFeature(
"supports_collation_on_charfield", "supports_collation_on_textfield"
)
def test_alter_field_type_and_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("info")
new_field.model = Note
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(
columns["info"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(columns["info"][1][8], collation)
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns["info"][0], "TextField")
self.assertIsNone(columns["info"][1][8])
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_ci_cs_db_collation(self):
cs_collation = connection.features.test_collations.get("cs")
ci_collation = connection.features.test_collations.get("ci")
try:
if connection.vendor == "mysql":
cs_collation = "latin1_general_cs"
elif connection.vendor == "postgresql":
cs_collation = "en-x-icu"
with connection.cursor() as cursor:
cursor.execute(
"CREATE COLLATION IF NOT EXISTS case_insensitive "
"(provider = icu, locale = 'und-u-ks-level2', "
"deterministic = false)"
)
ci_collation = "case_insensitive"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Case-insensitive collation.
old_field = Author._meta.get_field("name")
new_field_ci = CharField(max_length=255, db_collation=ci_collation)
new_field_ci.set_attributes_from_name("name")
new_field_ci.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field_ci, strict=True)
Author.objects.create(name="ANDREW")
self.assertIs(Author.objects.filter(name="Andrew").exists(), True)
# Case-sensitive collation.
new_field_cs = CharField(max_length=255, db_collation=cs_collation)
new_field_cs.set_attributes_from_name("name")
new_field_cs.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field_ci, new_field_cs, strict=True)
self.assertIs(Author.objects.filter(name="Andrew").exists(), False)
finally:
if connection.vendor == "postgresql":
with connection.cursor() as cursor:
cursor.execute("DROP COLLATION IF EXISTS case_insensitive")
|
18591da75b4ea08b2e0dfefe014ae3699f86879cd1552a966c6e2d27f846582d | from django.db import connection
from django.test import TestCase
class SchemaLoggerTests(TestCase):
def test_extra_args(self):
editor = connection.schema_editor(collect_sql=True)
sql = "SELECT * FROM foo WHERE id in (%s, %s)"
params = [42, 1337]
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
editor.execute(sql, params)
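        # When the schema editor binds parameters client-side, the values are
        # interpolated into the statement before execution, so the log record
        # carries the literal SQL and no separate params.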
if connection.features.schema_editor_uses_clientside_param_binding:
sql = "SELECT * FROM foo WHERE id in (42, 1337)"
params = None
self.assertEqual(cm.records[0].sql, sql)
self.assertEqual(cm.records[0].params, params)
self.assertEqual(cm.records[0].getMessage(), f"{sql}; (params {params})")
|
f4aab8937bc6bfd741cc70eb4a6ee35c08ff48d195b1d86cf8bc62885353844f | from django.db.models import FilteredRelation
from django.test import TestCase
from .models import Organiser, Pool, PoolStyle, Tournament
class ExistingRelatedInstancesTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.t1 = Tournament.objects.create(name="Tourney 1")
cls.t2 = Tournament.objects.create(name="Tourney 2")
cls.o1 = Organiser.objects.create(name="Organiser 1")
cls.p1 = Pool.objects.create(
name="T1 Pool 1", tournament=cls.t1, organiser=cls.o1
)
cls.p2 = Pool.objects.create(
name="T1 Pool 2", tournament=cls.t1, organiser=cls.o1
)
cls.p3 = Pool.objects.create(
name="T2 Pool 1", tournament=cls.t2, organiser=cls.o1
)
cls.p4 = Pool.objects.create(
name="T2 Pool 2", tournament=cls.t2, organiser=cls.o1
)
cls.ps1 = PoolStyle.objects.create(name="T1 Pool 2 Style", pool=cls.p2)
cls.ps2 = PoolStyle.objects.create(name="T2 Pool 1 Style", pool=cls.p3)
cls.ps3 = PoolStyle.objects.create(
name="T1 Pool 1/3 Style", pool=cls.p1, another_pool=cls.p3
)
def test_foreign_key(self):
with self.assertNumQueries(2):
tournament = Tournament.objects.get(pk=self.t1.pk)
pool = tournament.pool_set.all()[0]
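            # The reverse manager attaches the originating tournament to each
            # fetched pool, so pool.tournament is served from cache.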
self.assertIs(tournament, pool.tournament)
def test_foreign_key_prefetch_related(self):
with self.assertNumQueries(2):
tournament = Tournament.objects.prefetch_related("pool_set").get(
pk=self.t1.pk
)
pool = tournament.pool_set.all()[0]
self.assertIs(tournament, pool.tournament)
def test_foreign_key_multiple_prefetch(self):
with self.assertNumQueries(2):
tournaments = list(
Tournament.objects.prefetch_related("pool_set").order_by("pk")
)
pool1 = tournaments[0].pool_set.all()[0]
self.assertIs(tournaments[0], pool1.tournament)
pool2 = tournaments[1].pool_set.all()[0]
self.assertIs(tournaments[1], pool2.tournament)
def test_queryset_or(self):
tournament_1 = self.t1
tournament_2 = self.t2
with self.assertNumQueries(1):
pools = tournament_1.pool_set.all() | tournament_2.pool_set.all()
related_objects = {pool.tournament for pool in pools}
self.assertEqual(related_objects, {tournament_1, tournament_2})
def test_queryset_or_different_cached_items(self):
tournament = self.t1
organiser = self.o1
with self.assertNumQueries(1):
pools = tournament.pool_set.all() | organiser.pool_set.all()
first = pools.filter(pk=self.p1.pk)[0]
self.assertIs(first.tournament, tournament)
self.assertIs(first.organiser, organiser)
def test_queryset_or_only_one_with_precache(self):
tournament_1 = self.t1
tournament_2 = self.t2
# 2 queries here as pool 3 has tournament 2, which is not cached
with self.assertNumQueries(2):
pools = tournament_1.pool_set.all() | Pool.objects.filter(pk=self.p3.pk)
related_objects = {pool.tournament for pool in pools}
self.assertEqual(related_objects, {tournament_1, tournament_2})
# and the other direction
with self.assertNumQueries(2):
pools = Pool.objects.filter(pk=self.p3.pk) | tournament_1.pool_set.all()
related_objects = {pool.tournament for pool in pools}
self.assertEqual(related_objects, {tournament_1, tournament_2})
def test_queryset_and(self):
tournament = self.t1
organiser = self.o1
with self.assertNumQueries(1):
pools = tournament.pool_set.all() & organiser.pool_set.all()
first = pools.filter(pk=self.p1.pk)[0]
self.assertIs(first.tournament, tournament)
self.assertIs(first.organiser, organiser)
def test_one_to_one(self):
with self.assertNumQueries(2):
style = PoolStyle.objects.get(pk=self.ps1.pk)
pool = style.pool
self.assertIs(style, pool.poolstyle)
def test_one_to_one_select_related(self):
with self.assertNumQueries(1):
style = PoolStyle.objects.select_related("pool").get(pk=self.ps1.pk)
pool = style.pool
self.assertIs(style, pool.poolstyle)
def test_one_to_one_multi_select_related(self):
with self.assertNumQueries(1):
poolstyles = list(PoolStyle.objects.select_related("pool").order_by("pk"))
self.assertIs(poolstyles[0], poolstyles[0].pool.poolstyle)
self.assertIs(poolstyles[1], poolstyles[1].pool.poolstyle)
def test_one_to_one_prefetch_related(self):
with self.assertNumQueries(2):
style = PoolStyle.objects.prefetch_related("pool").get(pk=self.ps1.pk)
pool = style.pool
self.assertIs(style, pool.poolstyle)
def test_one_to_one_multi_prefetch_related(self):
with self.assertNumQueries(2):
poolstyles = list(PoolStyle.objects.prefetch_related("pool").order_by("pk"))
self.assertIs(poolstyles[0], poolstyles[0].pool.poolstyle)
self.assertIs(poolstyles[1], poolstyles[1].pool.poolstyle)
def test_reverse_one_to_one(self):
with self.assertNumQueries(2):
pool = Pool.objects.get(pk=self.p2.pk)
style = pool.poolstyle
self.assertIs(pool, style.pool)
def test_reverse_one_to_one_select_related(self):
with self.assertNumQueries(1):
pool = Pool.objects.select_related("poolstyle").get(pk=self.p2.pk)
style = pool.poolstyle
self.assertIs(pool, style.pool)
def test_reverse_one_to_one_prefetch_related(self):
with self.assertNumQueries(2):
pool = Pool.objects.prefetch_related("poolstyle").get(pk=self.p2.pk)
style = pool.poolstyle
self.assertIs(pool, style.pool)
def test_reverse_one_to_one_multi_select_related(self):
with self.assertNumQueries(1):
pools = list(Pool.objects.select_related("poolstyle").order_by("pk"))
self.assertIs(pools[1], pools[1].poolstyle.pool)
self.assertIs(pools[2], pools[2].poolstyle.pool)
def test_reverse_one_to_one_multi_prefetch_related(self):
with self.assertNumQueries(2):
pools = list(Pool.objects.prefetch_related("poolstyle").order_by("pk"))
self.assertIs(pools[1], pools[1].poolstyle.pool)
self.assertIs(pools[2], pools[2].poolstyle.pool)
def test_reverse_fk_select_related_multiple(self):
with self.assertNumQueries(1):
ps = list(
PoolStyle.objects.annotate(
pool_1=FilteredRelation("pool"),
pool_2=FilteredRelation("another_pool"),
)
.select_related("pool_1", "pool_2")
.order_by("-pk")
)
self.assertIs(ps[0], ps[0].pool_1.poolstyle)
self.assertIs(ps[0], ps[0].pool_2.another_style)
|
0748101b4d0d673144bf011b3634d9e735771c54465d3d390679cb0c44b2c10d | """
Existing related object instance caching.
Queries are not redone when going back through known relations.
"""
from django.db import models
class Tournament(models.Model):
name = models.CharField(max_length=30)
class Organiser(models.Model):
name = models.CharField(max_length=30)
class Pool(models.Model):
name = models.CharField(max_length=30)
tournament = models.ForeignKey(Tournament, models.CASCADE)
organiser = models.ForeignKey(Organiser, models.CASCADE)
class PoolStyle(models.Model):
name = models.CharField(max_length=30)
pool = models.OneToOneField(Pool, models.CASCADE)
another_pool = models.OneToOneField(
Pool, models.CASCADE, null=True, related_name="another_style"
)
|
14f87385175d2c71f546726e8e0841e1a6d1e20b3e4192711e051666430df7a5 | import datetime
from decimal import Decimal
from django.contrib.humanize.templatetags import humanize
from django.template import Context, Template, defaultfilters
from django.test import SimpleTestCase, modify_settings, override_settings
from django.utils import translation
from django.utils.html import escape
from django.utils.timezone import get_fixed_timezone
from django.utils.translation import gettext as _
# Mock out datetime in some tests so they don't fail occasionally when they
# run too slowly. Use a fixed datetime for datetime.now(). The DST change in
# America/Chicago (the default time zone) happened on March 11th in 2012.
now = datetime.datetime(2012, 3, 9, 22, 30)
class MockDateTime(datetime.datetime):
@classmethod
def now(cls, tz=None):
if tz is None or tz.utcoffset(now) is None:
return now
else:
# equals now.replace(tzinfo=utc)
return now.replace(tzinfo=tz) + tz.utcoffset(now)
@modify_settings(INSTALLED_APPS={"append": "django.contrib.humanize"})
class HumanizeTests(SimpleTestCase):
def humanize_tester(
self, test_list, result_list, method, normalize_result_func=escape
):
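        # Render each value through "{{ test_content|<method> }}" and compare
        # the output with the expected, normalized result.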
for test_content, result in zip(test_list, result_list):
with self.subTest(test_content):
t = Template("{%% load humanize %%}{{ test_content|%s }}" % method)
rendered = t.render(Context(locals())).strip()
self.assertEqual(
rendered,
normalize_result_func(result),
msg="%s test failed, produced '%s', should've produced '%s'"
% (method, rendered, result),
)
def test_ordinal(self):
test_list = (
"1",
"2",
"3",
"4",
"11",
"12",
"13",
"101",
"102",
"103",
"111",
"something else",
None,
)
result_list = (
"1st",
"2nd",
"3rd",
"4th",
"11th",
"12th",
"13th",
"101st",
"102nd",
"103rd",
"111th",
"something else",
None,
)
with translation.override("en"):
self.humanize_tester(test_list, result_list, "ordinal")
def test_i18n_html_ordinal(self):
"""Allow html in output on i18n strings"""
test_list = (
"1",
"2",
"3",
"4",
"11",
"12",
"13",
"101",
"102",
"103",
"111",
"something else",
None,
)
result_list = (
"1<sup>er</sup>",
"2<sup>e</sup>",
"3<sup>e</sup>",
"4<sup>e</sup>",
"11<sup>e</sup>",
"12<sup>e</sup>",
"13<sup>e</sup>",
"101<sup>er</sup>",
"102<sup>e</sup>",
"103<sup>e</sup>",
"111<sup>e</sup>",
"something else",
"None",
)
with translation.override("fr-fr"):
self.humanize_tester(test_list, result_list, "ordinal", lambda x: x)
def test_intcomma(self):
test_list = (
100,
1000,
10123,
10311,
1000000,
1234567.25,
"100",
"1000",
"10123",
"10311",
"1000000",
"1234567.1234567",
Decimal("1234567.1234567"),
None,
"1234567",
"1234567.12",
)
result_list = (
"100",
"1,000",
"10,123",
"10,311",
"1,000,000",
"1,234,567.25",
"100",
"1,000",
"10,123",
"10,311",
"1,000,000",
"1,234,567.1234567",
"1,234,567.1234567",
None,
"1,234,567",
"1,234,567.12",
)
with translation.override("en"):
self.humanize_tester(test_list, result_list, "intcomma")
def test_l10n_intcomma(self):
test_list = (
100,
1000,
10123,
10311,
1000000,
1234567.25,
"100",
"1000",
"10123",
"10311",
"1000000",
"1234567.1234567",
Decimal("1234567.1234567"),
None,
"1234567",
"1234567.12",
)
result_list = (
"100",
"1,000",
"10,123",
"10,311",
"1,000,000",
"1,234,567.25",
"100",
"1,000",
"10,123",
"10,311",
"1,000,000",
"1,234,567.1234567",
"1,234,567.1234567",
None,
"1,234,567",
"1,234,567.12",
)
with self.settings(USE_THOUSAND_SEPARATOR=False):
with translation.override("en"):
self.humanize_tester(test_list, result_list, "intcomma")
def test_intcomma_without_number_grouping(self):
# Regression for #17414
with translation.override("ja"):
self.humanize_tester([100], ["100"], "intcomma")
def test_intword(self):
# Positive integers.
test_list_positive = (
"100",
"1000000",
"1200000",
"1290000",
"1000000000",
"2000000000",
"6000000000000",
"1300000000000000",
"3500000000000000000000",
"8100000000000000000000000000000000",
("1" + "0" * 100),
("1" + "0" * 104),
)
result_list_positive = (
"100",
"1.0 million",
"1.2 million",
"1.3 million",
"1.0 billion",
"2.0 billion",
"6.0 trillion",
"1.3 quadrillion",
"3.5 sextillion",
"8.1 decillion",
"1.0 googol",
("1" + "0" * 104),
)
# Negative integers.
test_list_negative = ("-" + test for test in test_list_positive)
result_list_negative = ("-" + result for result in result_list_positive)
with translation.override("en"):
self.humanize_tester(
(*test_list_positive, *test_list_negative, None),
(*result_list_positive, *result_list_negative, None),
"intword",
)
def test_i18n_intcomma(self):
test_list = (
100,
1000,
10123,
10311,
1000000,
1234567.25,
"100",
"1000",
"10123",
"10311",
"1000000",
None,
)
result_list = (
"100",
"1.000",
"10.123",
"10.311",
"1.000.000",
"1.234.567,25",
"100",
"1.000",
"10.123",
"10.311",
"1.000.000",
None,
)
with self.settings(USE_THOUSAND_SEPARATOR=True):
with translation.override("de"):
self.humanize_tester(test_list, result_list, "intcomma")
def test_i18n_intword(self):
# Positive integers.
test_list_positive = (
"100",
"1000000",
"1200000",
"1290000",
"1000000000",
"2000000000",
"6000000000000",
)
result_list_positive = (
"100",
"1,0 Million",
"1,2 Millionen",
"1,3 Millionen",
"1,0 Milliarde",
"2,0 Milliarden",
"6,0 Billionen",
)
# Negative integers.
test_list_negative = ("-" + test for test in test_list_positive)
result_list_negative = ("-" + result for result in result_list_positive)
with self.settings(USE_THOUSAND_SEPARATOR=True):
with translation.override("de"):
self.humanize_tester(
(*test_list_positive, *test_list_negative),
(*result_list_positive, *result_list_negative),
"intword",
)
def test_apnumber(self):
test_list = [str(x) for x in range(1, 11)]
test_list.append(None)
result_list = (
"one",
"two",
"three",
"four",
"five",
"six",
"seven",
"eight",
"nine",
"10",
None,
)
with translation.override("en"):
self.humanize_tester(test_list, result_list, "apnumber")
def test_naturalday(self):
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
tomorrow = today + datetime.timedelta(days=1)
someday = today - datetime.timedelta(days=10)
notdate = "I'm not a date value"
test_list = (today, yesterday, tomorrow, someday, notdate, None)
someday_result = defaultfilters.date(someday)
result_list = (
_("today"),
_("yesterday"),
_("tomorrow"),
someday_result,
"I'm not a date value",
None,
)
self.humanize_tester(test_list, result_list, "naturalday")
def test_naturalday_tz(self):
today = datetime.date.today()
tz_one = get_fixed_timezone(-720)
tz_two = get_fixed_timezone(720)
# Can be today or yesterday
date_one = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_one)
naturalday_one = humanize.naturalday(date_one)
# Can be today or tomorrow
date_two = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_two)
naturalday_two = humanize.naturalday(date_two)
        # With a 24-hour difference they will never be the same
self.assertNotEqual(naturalday_one, naturalday_two)
def test_naturalday_uses_localtime(self):
# Regression for #18504
        # This is 2012-03-08T19:30:00-06:00 in America/Chicago
dt = datetime.datetime(2012, 3, 9, 1, 30, tzinfo=datetime.timezone.utc)
orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime
try:
with override_settings(TIME_ZONE="America/Chicago", USE_TZ=True):
with translation.override("en"):
self.humanize_tester([dt], ["yesterday"], "naturalday")
finally:
humanize.datetime = orig_humanize_datetime
def test_naturaltime(self):
class naive(datetime.tzinfo):
def utcoffset(self, dt):
return None
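        # A tzinfo whose utcoffset() is None makes a datetime behave as
        # naive even though tzinfo is set; naturaltime must handle it.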
test_list = [
"test",
now,
now - datetime.timedelta(microseconds=1),
now - datetime.timedelta(seconds=1),
now - datetime.timedelta(seconds=30),
now - datetime.timedelta(minutes=1, seconds=30),
now - datetime.timedelta(minutes=2),
now - datetime.timedelta(hours=1, minutes=30, seconds=30),
now - datetime.timedelta(hours=23, minutes=50, seconds=50),
now - datetime.timedelta(days=1),
now - datetime.timedelta(days=500),
now + datetime.timedelta(seconds=1),
now + datetime.timedelta(seconds=30),
now + datetime.timedelta(minutes=1, seconds=30),
now + datetime.timedelta(minutes=2),
now + datetime.timedelta(hours=1, minutes=30, seconds=30),
now + datetime.timedelta(hours=23, minutes=50, seconds=50),
now + datetime.timedelta(days=1),
now + datetime.timedelta(days=2, hours=6),
now + datetime.timedelta(days=500),
now.replace(tzinfo=naive()),
now.replace(tzinfo=datetime.timezone.utc),
]
result_list = [
"test",
"now",
"now",
"a second ago",
"30\xa0seconds ago",
"a minute ago",
"2\xa0minutes ago",
"an hour ago",
"23\xa0hours ago",
"1\xa0day ago",
"1\xa0year, 4\xa0months ago",
"a second from now",
"30\xa0seconds from now",
"a minute from now",
"2\xa0minutes from now",
"an hour from now",
"23\xa0hours from now",
"1\xa0day from now",
"2\xa0days, 6\xa0hours from now",
"1\xa0year, 4\xa0months from now",
"now",
"now",
]
# Because of the DST change, 2 days and 6 hours after the chosen
# date in naive arithmetic is only 2 days and 5 hours after in
# aware arithmetic.
result_list_with_tz_support = result_list[:]
assert result_list_with_tz_support[-4] == "2\xa0days, 6\xa0hours from now"
        result_list_with_tz_support[-4] = "2\xa0days, 5\xa0hours from now"
orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime
try:
with translation.override("en"):
self.humanize_tester(test_list, result_list, "naturaltime")
with override_settings(USE_TZ=True):
self.humanize_tester(
test_list, result_list_with_tz_support, "naturaltime"
)
finally:
humanize.datetime = orig_humanize_datetime
def test_naturaltime_as_documented(self):
"""
#23340 -- Verify the documented behavior of humanize.naturaltime.
"""
time_format = "%d %b %Y %H:%M:%S"
documented_now = datetime.datetime.strptime("17 Feb 2007 16:30:00", time_format)
test_data = (
("17 Feb 2007 16:30:00", "now"),
("17 Feb 2007 16:29:31", "29 seconds ago"),
("17 Feb 2007 16:29:00", "a minute ago"),
("17 Feb 2007 16:25:35", "4 minutes ago"),
("17 Feb 2007 15:30:29", "59 minutes ago"),
("17 Feb 2007 15:30:01", "59 minutes ago"),
("17 Feb 2007 15:30:00", "an hour ago"),
("17 Feb 2007 13:31:29", "2 hours ago"),
("16 Feb 2007 13:31:29", "1 day, 2 hours ago"),
("16 Feb 2007 13:30:01", "1 day, 2 hours ago"),
("16 Feb 2007 13:30:00", "1 day, 3 hours ago"),
("17 Feb 2007 16:30:30", "30 seconds from now"),
("17 Feb 2007 16:30:29", "29 seconds from now"),
("17 Feb 2007 16:31:00", "a minute from now"),
("17 Feb 2007 16:34:35", "4 minutes from now"),
("17 Feb 2007 17:30:29", "an hour from now"),
("17 Feb 2007 18:31:29", "2 hours from now"),
("18 Feb 2007 16:31:29", "1 day from now"),
("26 Feb 2007 18:31:29", "1 week, 2 days from now"),
)
class DocumentedMockDateTime(datetime.datetime):
@classmethod
def now(cls, tz=None):
if tz is None or tz.utcoffset(documented_now) is None:
return documented_now
else:
return documented_now.replace(tzinfo=tz) + tz.utcoffset(now)
orig_humanize_datetime = humanize.datetime
humanize.datetime = DocumentedMockDateTime
try:
for test_time_string, expected_natural_time in test_data:
with self.subTest(test_time_string):
test_time = datetime.datetime.strptime(
test_time_string, time_format
)
natural_time = humanize.naturaltime(test_time).replace("\xa0", " ")
self.assertEqual(expected_natural_time, natural_time)
finally:
humanize.datetime = orig_humanize_datetime
def test_inflection_for_timedelta(self):
"""
Translation of '%d day'/'%d month'/… may differ depending on the context
of the string it is inserted in.
"""
test_list = [
# "%(delta)s ago" translations
now - datetime.timedelta(days=1),
now - datetime.timedelta(days=2),
now - datetime.timedelta(days=30),
now - datetime.timedelta(days=60),
now - datetime.timedelta(days=500),
now - datetime.timedelta(days=865),
# "%(delta)s from now" translations
now + datetime.timedelta(days=1),
now + datetime.timedelta(days=2),
now + datetime.timedelta(days=31),
now + datetime.timedelta(days=61),
now + datetime.timedelta(days=500),
now + datetime.timedelta(days=865),
]
result_list = [
"před 1\xa0dnem",
"před 2\xa0dny",
"před 1\xa0měsícem",
"před 2\xa0měsíci",
"před 1\xa0rokem, 4\xa0měsíci",
"před 2\xa0lety, 4\xa0měsíci",
"za 1\xa0den",
"za 2\xa0dny",
"za 1\xa0měsíc",
"za 2\xa0měsíce",
"za 1\xa0rok, 4\xa0měsíce",
"za 2\xa0roky, 4\xa0měsíce",
]
orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime
try:
# Choose a language with different
# naturaltime-past/naturaltime-future translations.
with translation.override("cs"):
self.humanize_tester(test_list, result_list, "naturaltime")
finally:
humanize.datetime = orig_humanize_datetime
|
2c83f7e0eed35e8e3f26c6802afe4ee5ee39b527d020e41fb80620a34895be6a | import threading
from asgiref.sync import async_to_sync, iscoroutinefunction
from django.contrib.admindocs.middleware import XViewMiddleware
from django.contrib.auth.middleware import (
AuthenticationMiddleware,
RemoteUserMiddleware,
)
from django.contrib.flatpages.middleware import FlatpageFallbackMiddleware
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.redirects.middleware import RedirectFallbackMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.middleware import CurrentSiteMiddleware
from django.db import connection
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.middleware.cache import (
CacheMiddleware,
FetchFromCacheMiddleware,
UpdateCacheMiddleware,
)
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import BrokenLinkEmailsMiddleware, CommonMiddleware
from django.middleware.csrf import CsrfViewMiddleware
from django.middleware.gzip import GZipMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.middleware.locale import LocaleMiddleware
from django.middleware.security import SecurityMiddleware
from django.test import SimpleTestCase
from django.utils.deprecation import MiddlewareMixin
class MiddlewareMixinTests(SimpleTestCase):
middlewares = [
AuthenticationMiddleware,
BrokenLinkEmailsMiddleware,
CacheMiddleware,
CommonMiddleware,
ConditionalGetMiddleware,
CsrfViewMiddleware,
CurrentSiteMiddleware,
FetchFromCacheMiddleware,
FlatpageFallbackMiddleware,
GZipMiddleware,
LocaleMiddleware,
MessageMiddleware,
RedirectFallbackMiddleware,
RemoteUserMiddleware,
SecurityMiddleware,
SessionMiddleware,
UpdateCacheMiddleware,
XFrameOptionsMiddleware,
XViewMiddleware,
]
def test_repr(self):
class GetResponse:
def __call__(self):
return HttpResponse()
def get_response():
return HttpResponse()
self.assertEqual(
repr(MiddlewareMixin(GetResponse())),
"<MiddlewareMixin get_response=GetResponse>",
)
self.assertEqual(
repr(MiddlewareMixin(get_response)),
"<MiddlewareMixin get_response="
"MiddlewareMixinTests.test_repr.<locals>.get_response>",
)
self.assertEqual(
repr(CsrfViewMiddleware(GetResponse())),
"<CsrfViewMiddleware get_response=GetResponse>",
)
self.assertEqual(
repr(CsrfViewMiddleware(get_response)),
"<CsrfViewMiddleware get_response="
"MiddlewareMixinTests.test_repr.<locals>.get_response>",
)
def test_passing_explicit_none(self):
msg = "get_response must be provided."
for middleware in self.middlewares:
with self.subTest(middleware=middleware):
with self.assertRaisesMessage(ValueError, msg):
middleware(None)
def test_coroutine(self):
async def async_get_response(request):
return HttpResponse()
def sync_get_response(request):
return HttpResponse()
for middleware in self.middlewares:
with self.subTest(middleware=middleware.__qualname__):
                # Middleware appears as a coroutine if get_response is
                # a coroutine.
middleware_instance = middleware(async_get_response)
self.assertIs(iscoroutinefunction(middleware_instance), True)
                # Middleware doesn't appear as a coroutine if get_response is
                # not a coroutine.
middleware_instance = middleware(sync_get_response)
self.assertIs(iscoroutinefunction(middleware_instance), False)
def test_sync_to_async_uses_base_thread_and_connection(self):
"""
The process_request() and process_response() hooks must be called with
the sync_to_async thread_sensitive flag enabled, so that database
operations use the correct thread and connection.
"""
def request_lifecycle():
"""Fake request_started/request_finished."""
return (threading.get_ident(), id(connection))
        async def get_response(request):
return HttpResponse()
class SimpleMiddleWare(MiddlewareMixin):
def process_request(self, request):
request.thread_and_connection = request_lifecycle()
def process_response(self, request, response):
response.thread_and_connection = request_lifecycle()
return response
threads_and_connections = []
threads_and_connections.append(request_lifecycle())
request = HttpRequest()
response = async_to_sync(SimpleMiddleWare(get_response))(request)
threads_and_connections.append(request.thread_and_connection)
threads_and_connections.append(response.thread_and_connection)
threads_and_connections.append(request_lifecycle())
self.assertEqual(len(threads_and_connections), 4)
self.assertEqual(len(set(threads_and_connections)), 1)
|
8a0e556e6efcc45de80a1a99901f18abcc57787dac7f268afc52ed36de9abe5f | import base64
import hashlib
import os
import shutil
import sys
import tempfile as sys_tempfile
import unittest
from io import BytesIO, StringIO
from unittest import mock
from urllib.parse import quote
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import temp as tempfile
from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile
from django.http.multipartparser import (
FILE,
MultiPartParser,
MultiPartParserError,
Parser,
)
from django.test import SimpleTestCase, TestCase, client, override_settings
from . import uploadhandler
from .models import FileModel
UNICODE_FILENAME = "test-0123456789_中文_Orléans.jpg"
MEDIA_ROOT = sys_tempfile.mkdtemp()
UPLOAD_FOLDER = "test_upload"
UPLOAD_TO = os.path.join(MEDIA_ROOT, UPLOAD_FOLDER)
CANDIDATE_TRAVERSAL_FILE_NAMES = [
"/tmp/hax0rd.txt", # Absolute path, *nix-style.
"C:\\Windows\\hax0rd.txt", # Absolute path, win-style.
"C:/Windows/hax0rd.txt", # Absolute path, broken-style.
"\\tmp\\hax0rd.txt", # Absolute path, broken in a different way.
"/tmp\\hax0rd.txt", # Absolute path, broken by mixing.
"subdir/hax0rd.txt", # Descendant path, *nix-style.
"subdir\\hax0rd.txt", # Descendant path, win-style.
"sub/dir\\hax0rd.txt", # Descendant path, mixed.
"../../hax0rd.txt", # Relative path, *nix-style.
"..\\..\\hax0rd.txt", # Relative path, win-style.
"../..\\hax0rd.txt", # Relative path, mixed.
"../hax0rd.txt", # HTML entities.
"../hax0rd.txt", # HTML entities.
]
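# Every candidate above should be reduced to the bare "hax0rd.txt" by
# MultiPartParser.sanitize_file_name(), e.g.:
#   parser.sanitize_file_name("../../hax0rd.txt") == "hax0rd.txt"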
CANDIDATE_INVALID_FILE_NAMES = [
"/tmp/", # Directory, *nix-style.
"c:\\tmp\\", # Directory, win-style.
"/tmp/.", # Directory dot, *nix-style.
"c:\\tmp\\.", # Directory dot, *nix-style.
"/tmp/..", # Parent directory, *nix-style.
"c:\\tmp\\..", # Parent directory, win-style.
"", # Empty filename.
]
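# No usable file name remains for these candidates after sanitization, so
# MultiPartParser.sanitize_file_name() returns None for each of them.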
@override_settings(
MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF="file_uploads.urls", MIDDLEWARE=[]
)
class FileUploadTests(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
os.makedirs(MEDIA_ROOT, exist_ok=True)
cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT)
def test_upload_name_is_validated(self):
candidates = [
"/tmp/",
"/tmp/..",
"/tmp/.",
]
if sys.platform == "win32":
candidates.extend(
[
"c:\\tmp\\",
"c:\\tmp\\..",
"c:\\tmp\\.",
]
)
for file_name in candidates:
with self.subTest(file_name=file_name):
self.assertRaises(SuspiciousFileOperation, UploadedFile, name=file_name)
def test_simple_upload(self):
with open(__file__, "rb") as fp:
post_data = {
"name": "Ringo",
"file_field": fp,
}
response = self.client.post("/upload/", post_data)
self.assertEqual(response.status_code, 200)
def test_large_upload(self):
file = tempfile.NamedTemporaryFile
with file(suffix=".file1") as file1, file(suffix=".file2") as file2:
file1.write(b"a" * (2**21))
file1.seek(0)
file2.write(b"a" * (10 * 2**20))
file2.seek(0)
post_data = {
"name": "Ringo",
"file_field1": file1,
"file_field2": file2,
}
for key in list(post_data):
try:
post_data[key + "_hash"] = hashlib.sha1(
post_data[key].read()
).hexdigest()
post_data[key].seek(0)
except AttributeError:
post_data[key + "_hash"] = hashlib.sha1(
post_data[key].encode()
).hexdigest()
response = self.client.post("/verify/", post_data)
self.assertEqual(response.status_code, 200)
def _test_base64_upload(self, content, encode=base64.b64encode):
payload = client.FakePayload(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="test.txt"',
"Content-Type: application/octet-stream",
"Content-Transfer-Encoding: base64",
"",
]
)
)
payload.write(b"\r\n" + encode(content.encode()) + b"\r\n")
payload.write("--" + client.BOUNDARY + "--\r\n")
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo_content/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.json()["file"], content)
def test_base64_upload(self):
self._test_base64_upload("This data will be transmitted base64-encoded.")
def test_big_base64_upload(self):
self._test_base64_upload("Big data" * 68000) # > 512Kb
def test_big_base64_newlines_upload(self):
self._test_base64_upload("Big data" * 68000, encode=base64.encodebytes)
def test_base64_invalid_upload(self):
payload = client.FakePayload(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="test.txt"',
"Content-Type: application/octet-stream",
"Content-Transfer-Encoding: base64",
"",
]
)
)
payload.write(b"\r\n!\r\n")
payload.write("--" + client.BOUNDARY + "--\r\n")
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo_content/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.json()["file"], "")
def test_unicode_file_name(self):
with sys_tempfile.TemporaryDirectory() as temp_dir:
# This file contains Chinese symbols and an accented char in the name.
with open(os.path.join(temp_dir, UNICODE_FILENAME), "w+b") as file1:
file1.write(b"b" * (2**10))
file1.seek(0)
response = self.client.post("/unicode_name/", {"file_unicode": file1})
self.assertEqual(response.status_code, 200)
def test_unicode_file_name_rfc2231(self):
"""
Receiving file upload when filename is encoded with RFC 2231.
"""
payload = client.FakePayload()
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file_unicode"; '
"filename*=UTF-8''%s" % quote(UNICODE_FILENAME),
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
"\r\n--" + client.BOUNDARY + "--\r\n",
]
)
)
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/unicode_name/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_name_rfc2231(self):
"""
Receiving file upload when filename is encoded with RFC 2231.
"""
payload = client.FakePayload()
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
"Content-Disposition: form-data; name*=UTF-8''file_unicode; "
"filename*=UTF-8''%s" % quote(UNICODE_FILENAME),
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
"\r\n--" + client.BOUNDARY + "--\r\n",
]
)
)
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/unicode_name/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_file_name_rfc2231_with_double_quotes(self):
payload = client.FakePayload()
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file_unicode"; '
"filename*=\"UTF-8''%s\"" % quote(UNICODE_FILENAME),
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
"\r\n--" + client.BOUNDARY + "--\r\n",
]
)
)
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/unicode_name/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_name_rfc2231_with_double_quotes(self):
payload = client.FakePayload()
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
"Content-Disposition: form-data; name*=\"UTF-8''file_unicode\"; "
"filename*=\"UTF-8''%s\"" % quote(UNICODE_FILENAME),
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
"\r\n--" + client.BOUNDARY + "--\r\n",
]
)
)
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/unicode_name/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_blank_filenames(self):
"""
Receiving file upload when filename is blank (before and after
sanitization) should be okay.
"""
filenames = [
"",
# Normalized by MultiPartParser.IE_sanitize().
"C:\\Windows\\",
# Normalized by os.path.basename().
"/",
"ends-with-slash/",
]
payload = client.FakePayload()
for i, name in enumerate(filenames):
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file%s"; filename="%s"'
% (i, name),
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
]
)
)
payload.write("\r\n--" + client.BOUNDARY + "--\r\n")
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
# Empty filenames should be ignored
received = response.json()
for i, name in enumerate(filenames):
self.assertIsNone(received.get("file%s" % i))
def test_non_printable_chars_in_file_names(self):
file_name = "non-\x00printable\x00\n_chars.txt\x00"
payload = client.FakePayload()
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
f'Content-Disposition: form-data; name="file"; '
f'filename="{file_name}"',
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
]
)
)
payload.write("\r\n--" + client.BOUNDARY + "--\r\n")
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
# Non-printable chars are sanitized.
received = response.json()
self.assertEqual(received["file"], "non-printable_chars.txt")
def test_dangerous_file_names(self):
"""Uploaded file names should be sanitized before ever reaching the view."""
# This test simulates possible directory traversal attacks by a
# malicious uploader We have to do some monkeybusiness here to construct
# a malicious payload with an invalid file name (containing os.sep or
# os.pardir). This similar to what an attacker would need to do when
# trying such an attack.
payload = client.FakePayload()
for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES):
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file%s"; filename="%s"'
% (i, name),
"Content-Type: application/octet-stream",
"",
"You got pwnd.\r\n",
]
)
)
payload.write("\r\n--" + client.BOUNDARY + "--\r\n")
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
        # The filenames should have been sanitized by the time they reach the view.
received = response.json()
for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES):
got = received["file%s" % i]
self.assertEqual(got, "hax0rd.txt")
def test_filename_overflow(self):
"""File names over 256 characters (dangerous on some platforms) get fixed up."""
long_str = "f" * 300
cases = [
# field name, filename, expected
("long_filename", "%s.txt" % long_str, "%s.txt" % long_str[:251]),
("long_extension", "foo.%s" % long_str, ".%s" % long_str[:254]),
("no_extension", long_str, long_str[:255]),
("no_filename", ".%s" % long_str, ".%s" % long_str[:254]),
("long_everything", "%s.%s" % (long_str, long_str), ".%s" % long_str[:254]),
]
payload = client.FakePayload()
for name, filename, _ in cases:
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="{}"; filename="{}"',
"Content-Type: application/octet-stream",
"",
"Oops.",
"",
]
).format(name, filename)
)
payload.write("\r\n--" + client.BOUNDARY + "--\r\n")
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
result = response.json()
for name, _, expected in cases:
got = result[name]
self.assertEqual(expected, got, "Mismatch for {}".format(name))
self.assertLess(
len(got), 256, "Got a long file name (%s characters)." % len(got)
)
def test_file_content(self):
file = tempfile.NamedTemporaryFile
with file(suffix=".ctype_extra") as no_content_type, file(
suffix=".ctype_extra"
) as simple_file:
no_content_type.write(b"no content")
no_content_type.seek(0)
simple_file.write(b"text content")
simple_file.seek(0)
simple_file.content_type = "text/plain"
string_io = StringIO("string content")
bytes_io = BytesIO(b"binary content")
response = self.client.post(
"/echo_content/",
{
"no_content_type": no_content_type,
"simple_file": simple_file,
"string": string_io,
"binary": bytes_io,
},
)
received = response.json()
self.assertEqual(received["no_content_type"], "no content")
self.assertEqual(received["simple_file"], "text content")
self.assertEqual(received["string"], "string content")
self.assertEqual(received["binary"], "binary content")
def test_content_type_extra(self):
"""Uploaded files may have content type parameters available."""
file = tempfile.NamedTemporaryFile
with file(suffix=".ctype_extra") as no_content_type, file(
suffix=".ctype_extra"
) as simple_file:
no_content_type.write(b"something")
no_content_type.seek(0)
simple_file.write(b"something")
simple_file.seek(0)
simple_file.content_type = "text/plain; test-key=test_value"
response = self.client.post(
"/echo_content_type_extra/",
{
"no_content_type": no_content_type,
"simple_file": simple_file,
},
)
received = response.json()
self.assertEqual(received["no_content_type"], {})
self.assertEqual(received["simple_file"], {"test-key": "test_value"})
def test_truncated_multipart_handled_gracefully(self):
"""
        If passed an incomplete multipart message, MultiPartParser does not
        attempt to read beyond the end of the stream and gracefully handles
        the part that can be parsed.
"""
payload_str = "\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="foo.txt"',
"Content-Type: application/octet-stream",
"",
"file contents" "--" + client.BOUNDARY + "--",
"",
]
)
payload = client.FakePayload(payload_str[:-10])
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
self.assertEqual(self.client.request(**r).json(), {})
def test_empty_multipart_handled_gracefully(self):
"""
If passed an empty multipart message, MultiPartParser will return
an empty QueryDict.
"""
r = {
"CONTENT_LENGTH": 0,
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/echo/",
"REQUEST_METHOD": "POST",
"wsgi.input": client.FakePayload(b""),
}
self.assertEqual(self.client.request(**r).json(), {})
def test_custom_upload_handler(self):
file = tempfile.NamedTemporaryFile
with file() as smallfile, file() as bigfile:
# A small file (under the 5M quota)
smallfile.write(b"a" * (2**21))
smallfile.seek(0)
# A big file (over the quota)
bigfile.write(b"a" * (10 * 2**20))
bigfile.seek(0)
# Small file posting should work.
self.assertIn("f", self.client.post("/quota/", {"f": smallfile}).json())
# Large files don't go through.
self.assertNotIn("f", self.client.post("/quota/", {"f": bigfile}).json())
def test_broken_custom_upload_handler(self):
with tempfile.NamedTemporaryFile() as file:
file.write(b"a" * (2**21))
file.seek(0)
msg = (
"You cannot alter upload handlers after the upload has been processed."
)
with self.assertRaisesMessage(AttributeError, msg):
self.client.post("/quota/broken/", {"f": file})
def test_stop_upload_temporary_file_handler(self):
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(b"a")
temp_file.seek(0)
response = self.client.post("/temp_file/stop_upload/", {"file": temp_file})
temp_path = response.json()["temp_path"]
self.assertIs(os.path.exists(temp_path), False)
def test_upload_interrupted_temporary_file_handler(self):
# Simulate an interrupted upload by omitting the closing boundary.
class MockedParser(Parser):
def __iter__(self):
for item in super().__iter__():
item_type, meta_data, field_stream = item
yield item_type, meta_data, field_stream
if item_type == FILE:
return
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(b"a")
temp_file.seek(0)
with mock.patch(
"django.http.multipartparser.Parser",
MockedParser,
):
response = self.client.post(
"/temp_file/upload_interrupted/",
{"file": temp_file},
)
temp_path = response.json()["temp_path"]
self.assertIs(os.path.exists(temp_path), False)
def test_fileupload_getlist(self):
file = tempfile.NamedTemporaryFile
with file() as file1, file() as file2, file() as file2a:
file1.write(b"a" * (2**23))
file1.seek(0)
file2.write(b"a" * (2 * 2**18))
file2.seek(0)
file2a.write(b"a" * (5 * 2**20))
file2a.seek(0)
response = self.client.post(
"/getlist_count/",
{
"file1": file1,
"field1": "test",
"field2": "test3",
"field3": "test5",
"field4": "test6",
"field5": "test7",
"file2": (file2, file2a),
},
)
got = response.json()
self.assertEqual(got.get("file1"), 1)
self.assertEqual(got.get("file2"), 2)
def test_fileuploads_closed_at_request_end(self):
file = tempfile.NamedTemporaryFile
with file() as f1, file() as f2a, file() as f2b:
response = self.client.post(
"/fd_closing/t/",
{
"file": f1,
"file2": (f2a, f2b),
},
)
request = response.wsgi_request
# The files were parsed.
self.assertTrue(hasattr(request, "_files"))
file = request._files["file"]
self.assertTrue(file.closed)
files = request._files.getlist("file2")
self.assertTrue(files[0].closed)
self.assertTrue(files[1].closed)
def test_no_parsing_triggered_by_fd_closing(self):
file = tempfile.NamedTemporaryFile
with file() as f1, file() as f2a, file() as f2b:
response = self.client.post(
"/fd_closing/f/",
{
"file": f1,
"file2": (f2a, f2b),
},
)
request = response.wsgi_request
# The fd closing logic doesn't trigger parsing of the stream
self.assertFalse(hasattr(request, "_files"))
def test_file_error_blocking(self):
"""
The server should not block when there are upload errors (bug #8622).
        This can happen if something -- e.g. an exception handler -- tries to
access POST while handling an error in parsing POST. This shouldn't
cause an infinite loop!
"""
class POSTAccessingHandler(client.ClientHandler):
"""A handler that'll access POST during an exception."""
def handle_uncaught_exception(self, request, resolver, exc_info):
ret = super().handle_uncaught_exception(request, resolver, exc_info)
request.POST # evaluate
return ret
        # Maybe this is a little more complicated than it needs to be; but if
# the django.test.client.FakePayload.read() implementation changes then
# this test would fail. So we need to know exactly what kind of error
# it raises when there is an attempt to read more than the available bytes:
try:
client.FakePayload(b"a").read(2)
except Exception as err:
reference_error = err
# install the custom handler that tries to access request.POST
self.client.handler = POSTAccessingHandler()
with open(__file__, "rb") as fp:
post_data = {
"name": "Ringo",
"file_field": fp,
}
try:
self.client.post("/upload_errors/", post_data)
except reference_error.__class__ as err:
self.assertNotEqual(
str(err),
str(reference_error),
"Caught a repeated exception that'll cause an infinite loop in "
"file uploads.",
)
except Exception as err:
# CustomUploadError is the error that should have been raised
self.assertEqual(err.__class__, uploadhandler.CustomUploadError)
def test_filename_case_preservation(self):
"""
The storage backend shouldn't mess with the case of the filenames
uploaded.
"""
# Synthesize the contents of a file upload with a mixed case filename
# so we don't have to carry such a file in the Django tests source code
# tree.
vars = {"boundary": "oUrBoUnDaRyStRiNg"}
post_data = [
"--%(boundary)s",
'Content-Disposition: form-data; name="file_field"; '
'filename="MiXeD_cAsE.txt"',
"Content-Type: application/octet-stream",
"",
"file contents\n",
"--%(boundary)s--\r\n",
]
response = self.client.post(
"/filename_case/",
"\r\n".join(post_data) % vars,
"multipart/form-data; boundary=%(boundary)s" % vars,
)
self.assertEqual(response.status_code, 200)
id = int(response.content)
obj = FileModel.objects.get(pk=id)
# The name of the file uploaded and the file stored in the server-side
# shouldn't differ.
self.assertEqual(os.path.basename(obj.testfile.path), "MiXeD_cAsE.txt")
def test_filename_traversal_upload(self):
os.makedirs(UPLOAD_TO, exist_ok=True)
tests = [
"../test.txt",
"../test.txt",
]
for file_name in tests:
with self.subTest(file_name=file_name):
payload = client.FakePayload()
payload.write(
"\r\n".join(
[
"--" + client.BOUNDARY,
'Content-Disposition: form-data; name="my_file"; '
'filename="%s";' % file_name,
"Content-Type: text/plain",
"",
"file contents.\r\n",
"\r\n--" + client.BOUNDARY + "--\r\n",
]
),
)
r = {
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": client.MULTIPART_CONTENT,
"PATH_INFO": "/upload_traversal/",
"REQUEST_METHOD": "POST",
"wsgi.input": payload,
}
response = self.client.request(**r)
result = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(result["file_name"], "test.txt")
self.assertIs(
os.path.exists(os.path.join(MEDIA_ROOT, "test.txt")),
False,
)
self.assertIs(
os.path.exists(os.path.join(UPLOAD_TO, "test.txt")),
True,
)
@override_settings(MEDIA_ROOT=MEDIA_ROOT)
class DirectoryCreationTests(SimpleTestCase):
"""
Tests for error handling during directory creation
via _save_FIELD_file (ticket #6450)
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
os.makedirs(MEDIA_ROOT, exist_ok=True)
cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT)
def setUp(self):
self.obj = FileModel()
@unittest.skipIf(
sys.platform == "win32", "Python on Windows doesn't have working os.chmod()."
)
def test_readonly_root(self):
"""Permission errors are not swallowed"""
os.chmod(MEDIA_ROOT, 0o500)
self.addCleanup(os.chmod, MEDIA_ROOT, 0o700)
with self.assertRaises(PermissionError):
self.obj.testfile.save(
"foo.txt", SimpleUploadedFile("foo.txt", b"x"), save=False
)
def test_not_a_directory(self):
# Create a file with the upload directory name
open(UPLOAD_TO, "wb").close()
self.addCleanup(os.remove, UPLOAD_TO)
msg = "%s exists and is not a directory." % UPLOAD_TO
with self.assertRaisesMessage(FileExistsError, msg):
with SimpleUploadedFile("foo.txt", b"x") as file:
self.obj.testfile.save("foo.txt", file, save=False)
class MultiParserTests(SimpleTestCase):
def test_empty_upload_handlers(self):
# We're not actually parsing here; just checking if the parser properly
# instantiates with empty upload handlers.
MultiPartParser(
{
"CONTENT_TYPE": "multipart/form-data; boundary=_foo",
"CONTENT_LENGTH": "1",
},
StringIO("x"),
[],
"utf-8",
)
def test_invalid_content_type(self):
with self.assertRaisesMessage(
MultiPartParserError, "Invalid Content-Type: text/plain"
):
MultiPartParser(
{
"CONTENT_TYPE": "text/plain",
"CONTENT_LENGTH": "1",
},
StringIO("x"),
[],
"utf-8",
)
def test_negative_content_length(self):
with self.assertRaisesMessage(
MultiPartParserError, "Invalid content length: -1"
):
MultiPartParser(
{
"CONTENT_TYPE": "multipart/form-data; boundary=_foo",
"CONTENT_LENGTH": -1,
},
StringIO("x"),
[],
"utf-8",
)
def test_bad_type_content_length(self):
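        # A non-numeric CONTENT_LENGTH is coerced to 0 instead of raising.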
multipart_parser = MultiPartParser(
{
"CONTENT_TYPE": "multipart/form-data; boundary=_foo",
"CONTENT_LENGTH": "a",
},
StringIO("x"),
[],
"utf-8",
)
self.assertEqual(multipart_parser._content_length, 0)
def test_sanitize_file_name(self):
parser = MultiPartParser(
{
"CONTENT_TYPE": "multipart/form-data; boundary=_foo",
"CONTENT_LENGTH": "1",
},
StringIO("x"),
[],
"utf-8",
)
for file_name in CANDIDATE_TRAVERSAL_FILE_NAMES:
with self.subTest(file_name=file_name):
self.assertEqual(parser.sanitize_file_name(file_name), "hax0rd.txt")
def test_sanitize_invalid_file_name(self):
parser = MultiPartParser(
{
"CONTENT_TYPE": "multipart/form-data; boundary=_foo",
"CONTENT_LENGTH": "1",
},
StringIO("x"),
[],
"utf-8",
)
for file_name in CANDIDATE_INVALID_FILE_NAMES:
with self.subTest(file_name=file_name):
self.assertIsNone(parser.sanitize_file_name(file_name))
|
ded707f79d78073c0acbf619c95380f987c0bd4381084a2fc264a2ce8dbbfd11 | """
Upload handlers to test the upload API.
"""
import os
from tempfile import NamedTemporaryFile
from django.core.files.uploadhandler import (
FileUploadHandler,
StopUpload,
TemporaryFileUploadHandler,
)
class QuotaUploadHandler(FileUploadHandler):
"""
This test upload handler terminates the connection if more than a quota
(5MB) is uploaded.
"""
QUOTA = 5 * 2**20 # 5 MB
def __init__(self, request=None):
super().__init__(request)
self.total_upload = 0
def receive_data_chunk(self, raw_data, start):
self.total_upload += len(raw_data)
if self.total_upload >= self.QUOTA:
raise StopUpload(connection_reset=True)
return raw_data
def file_complete(self, file_size):
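        # Returning None signals that this handler did not produce a file,
        # letting any remaining upload handlers process it instead.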
return None
class StopUploadTemporaryFileHandler(TemporaryFileUploadHandler):
"""A handler that raises a StopUpload exception."""
def receive_data_chunk(self, raw_data, start):
raise StopUpload()
class CustomUploadError(Exception):
pass
class ErroringUploadHandler(FileUploadHandler):
"""A handler that raises an exception."""
def receive_data_chunk(self, raw_data, start):
raise CustomUploadError("Oops!")
class TraversalUploadHandler(FileUploadHandler):
"""A handler with potential directory-traversal vulnerability."""
def __init__(self, request=None):
from .tests import UPLOAD_TO
super().__init__(request)
self.upload_dir = UPLOAD_TO
def file_complete(self, file_size):
self.file.seek(0)
self.file.size = file_size
with open(os.path.join(self.upload_dir, self.file_name), "wb") as fp:
fp.write(self.file.read())
return self.file
def new_file(
self,
field_name,
file_name,
content_type,
content_length,
charset=None,
content_type_extra=None,
):
        super().new_file(
            field_name,
            file_name,
            content_type,
            content_length,
            charset,
            content_type_extra,
        )
self.file = NamedTemporaryFile(suffix=".upload", dir=self.upload_dir)
def receive_data_chunk(self, raw_data, start):
self.file.write(raw_data)
|
646c81d5508eb47dcddc9dbe21d9068a8bf26f4570bea8ca0282d7f6173c8bdf | import hashlib
import os
from django.core.files.uploadedfile import UploadedFile
from django.core.files.uploadhandler import TemporaryFileUploadHandler
from django.http import HttpResponse, HttpResponseServerError, JsonResponse
from .models import FileModel
from .tests import UNICODE_FILENAME, UPLOAD_FOLDER
from .uploadhandler import (
ErroringUploadHandler,
QuotaUploadHandler,
StopUploadTemporaryFileHandler,
TraversalUploadHandler,
)
def file_upload_view(request):
"""
    A file upload can be received and merged into the POST dictionary.
"""
form_data = request.POST.copy()
form_data.update(request.FILES)
if isinstance(form_data.get("file_field"), UploadedFile) and isinstance(
form_data["name"], str
):
# If a file is posted, the dummy client should only post the file name,
# not the full path.
if os.path.dirname(form_data["file_field"].name) != "":
return HttpResponseServerError()
return HttpResponse()
else:
return HttpResponseServerError()
def file_upload_view_verify(request):
"""
Use the sha digest hash to verify the uploaded contents.
"""
form_data = request.POST.copy()
form_data.update(request.FILES)
for key, value in form_data.items():
if key.endswith("_hash"):
continue
if key + "_hash" not in form_data:
continue
submitted_hash = form_data[key + "_hash"]
if isinstance(value, UploadedFile):
new_hash = hashlib.sha1(value.read()).hexdigest()
else:
new_hash = hashlib.sha1(value.encode()).hexdigest()
if new_hash != submitted_hash:
return HttpResponseServerError()
    # Adding a large file to the database should succeed.
largefile = request.FILES["file_field2"]
obj = FileModel()
obj.testfile.save(largefile.name, largefile)
return HttpResponse()
def file_upload_unicode_name(request):
    # Check to see if the Unicode name came through properly.
if not request.FILES["file_unicode"].name.endswith(UNICODE_FILENAME):
return HttpResponseServerError()
# Check to make sure the exotic characters are preserved even
# through file save.
uni_named_file = request.FILES["file_unicode"]
file_model = FileModel.objects.create(testfile=uni_named_file)
full_name = f"{UPLOAD_FOLDER}/{uni_named_file.name}"
return (
HttpResponse()
if file_model.testfile.storage.exists(full_name)
else HttpResponseServerError()
)
def file_upload_echo(request):
"""
Simple view to echo back info about uploaded files for tests.
"""
r = {k: f.name for k, f in request.FILES.items()}
return JsonResponse(r)
def file_upload_echo_content(request):
"""
Simple view to echo back the content of uploaded files for tests.
"""
def read_and_close(f):
with f:
return f.read().decode()
r = {k: read_and_close(f) for k, f in request.FILES.items()}
return JsonResponse(r)
def file_upload_quota(request):
"""
Dynamically add in an upload handler.
"""
request.upload_handlers.insert(0, QuotaUploadHandler())
return file_upload_echo(request)
def file_upload_quota_broken(request):
"""
You can't change handlers after reading FILES; this view shouldn't work.
"""
response = file_upload_echo(request)
request.upload_handlers.insert(0, QuotaUploadHandler())
return response
def file_stop_upload_temporary_file(request):
request.upload_handlers.insert(0, StopUploadTemporaryFileHandler())
request.upload_handlers.pop(2)
request.FILES # Trigger file parsing.
return JsonResponse(
{"temp_path": request.upload_handlers[0].file.temporary_file_path()},
)
def file_upload_interrupted_temporary_file(request):
request.upload_handlers.insert(0, TemporaryFileUploadHandler())
request.upload_handlers.pop(2)
request.FILES # Trigger file parsing.
return JsonResponse(
{"temp_path": request.upload_handlers[0].file.temporary_file_path()},
)
def file_upload_getlist_count(request):
"""
Check the .getlist() function to ensure we receive the correct number of files.
"""
file_counts = {}
for key in request.FILES:
file_counts[key] = len(request.FILES.getlist(key))
return JsonResponse(file_counts)
def file_upload_errors(request):
request.upload_handlers.insert(0, ErroringUploadHandler())
return file_upload_echo(request)
def file_upload_filename_case_view(request):
"""
    Check that adding the file to the database preserves the filename case.
"""
file = request.FILES["file_field"]
obj = FileModel()
obj.testfile.save(file.name, file)
return HttpResponse("%d" % obj.pk)
def file_upload_content_type_extra(request):
"""
Simple view to echo back extra content-type parameters.
"""
params = {}
for file_name, uploadedfile in request.FILES.items():
params[file_name] = {
k: v.decode() for k, v in uploadedfile.content_type_extra.items()
}
return JsonResponse(params)
def file_upload_fd_closing(request, access):
if access == "t":
request.FILES # Trigger file parsing.
return HttpResponse()
def file_upload_traversal_view(request):
request.upload_handlers.insert(0, TraversalUploadHandler())
request.FILES # Trigger file parsing.
return JsonResponse(
{"file_name": request.upload_handlers[0].file_name},
)
|
038f247257a28fc19d0c02ad026f5eb569b3fe94bee1be988b7f5a22f174a267 | import unittest
from django.core.exceptions import ImproperlyConfigured
from django.db import ProgrammingError
try:
from django.contrib.gis.db.backends.postgis.operations import PostGISOperations
HAS_POSTGRES = True
except ImportError:
HAS_POSTGRES = False
if HAS_POSTGRES:
class FakeConnection:
def __init__(self):
self.settings_dict = {
"NAME": "test",
}
class FakePostGISOperations(PostGISOperations):
def __init__(self, version=None):
self.version = version
self.connection = FakeConnection()
def _get_postgis_func(self, func):
if func == "postgis_lib_version":
if self.version is None:
raise ProgrammingError
else:
return self.version
elif func == "version":
pass
else:
raise NotImplementedError("This function was not expected to be called")
@unittest.skipUnless(HAS_POSTGRES, "The psycopg driver is needed for these tests")
class TestPostGISVersionCheck(unittest.TestCase):
"""
    The PostGIS version check parses the version numbers correctly.
"""
def test_get_version(self):
expect = "1.0.0"
ops = FakePostGISOperations(expect)
actual = ops.postgis_lib_version()
self.assertEqual(expect, actual)
def test_version_classic_tuple(self):
expect = ("1.2.3", 1, 2, 3)
ops = FakePostGISOperations(expect[0])
actual = ops.postgis_version_tuple()
self.assertEqual(expect, actual)
def test_version_dev_tuple(self):
expect = ("1.2.3dev", 1, 2, 3)
ops = FakePostGISOperations(expect[0])
actual = ops.postgis_version_tuple()
self.assertEqual(expect, actual)
def test_version_loose_tuple(self):
expect = ("1.2.3b1.dev0", 1, 2, 3)
ops = FakePostGISOperations(expect[0])
actual = ops.postgis_version_tuple()
self.assertEqual(expect, actual)
def test_valid_version_numbers(self):
versions = [
("1.3.0", 1, 3, 0),
("2.1.1", 2, 1, 1),
("2.2.0dev", 2, 2, 0),
]
for version in versions:
with self.subTest(version=version):
ops = FakePostGISOperations(version[0])
actual = ops.spatial_version
self.assertEqual(version[1:], actual)
def test_no_version_number(self):
ops = FakePostGISOperations()
with self.assertRaises(ImproperlyConfigured):
ops.spatial_version
|
a661975c5e0b3fa52f30636b0fe4cf30b2cba45dda40b0995b8a8bbc31c065cc | from math import ceil
from django.db import connection, models
from django.db.models import ProtectedError, Q, RestrictedError
from django.db.models.deletion import Collector
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import (
B1,
B2,
B3,
MR,
A,
Avatar,
B,
Base,
Child,
DeleteBottom,
DeleteTop,
GenericB1,
GenericB2,
GenericDeleteBottom,
HiddenUser,
HiddenUserProfile,
M,
M2MFrom,
M2MTo,
MRNull,
Origin,
P,
Parent,
R,
RChild,
RChildChild,
Referrer,
S,
T,
User,
create_a,
get_default_r,
)
class OnDeleteTests(TestCase):
def setUp(self):
self.DEFAULT = get_default_r()
def test_auto(self):
a = create_a("auto")
a.auto.delete()
self.assertFalse(A.objects.filter(name="auto").exists())
def test_non_callable(self):
msg = "on_delete must be callable."
with self.assertRaisesMessage(TypeError, msg):
models.ForeignKey("self", on_delete=None)
with self.assertRaisesMessage(TypeError, msg):
models.OneToOneField("self", on_delete=None)
def test_auto_nullable(self):
a = create_a("auto_nullable")
a.auto_nullable.delete()
self.assertFalse(A.objects.filter(name="auto_nullable").exists())
def test_setvalue(self):
a = create_a("setvalue")
a.setvalue.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(self.DEFAULT, a.setvalue.pk)
def test_setnull(self):
a = create_a("setnull")
a.setnull.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setnull)
def test_setdefault(self):
a = create_a("setdefault")
a.setdefault.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(self.DEFAULT, a.setdefault.pk)
def test_setdefault_none(self):
a = create_a("setdefault_none")
a.setdefault_none.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setdefault_none)
def test_cascade(self):
a = create_a("cascade")
a.cascade.delete()
self.assertFalse(A.objects.filter(name="cascade").exists())
def test_cascade_nullable(self):
a = create_a("cascade_nullable")
a.cascade_nullable.delete()
self.assertFalse(A.objects.filter(name="cascade_nullable").exists())
def test_protect(self):
a = create_a("protect")
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through protected foreign keys: 'A.protect'."
)
with self.assertRaisesMessage(ProtectedError, msg) as cm:
a.protect.delete()
self.assertEqual(cm.exception.protected_objects, {a})
def test_protect_multiple(self):
a = create_a("protect")
b = B.objects.create(protect=a.protect)
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through protected foreign keys: 'A.protect', "
"'B.protect'."
)
with self.assertRaisesMessage(ProtectedError, msg) as cm:
a.protect.delete()
self.assertEqual(cm.exception.protected_objects, {a, b})
def test_protect_path(self):
a = create_a("protect")
a.protect.p = P.objects.create()
a.protect.save()
msg = (
"Cannot delete some instances of model 'P' because they are "
"referenced through protected foreign keys: 'R.p'."
)
with self.assertRaisesMessage(ProtectedError, msg) as cm:
a.protect.p.delete()
self.assertEqual(cm.exception.protected_objects, {a})
def test_do_nothing(self):
# Testing DO_NOTHING is a bit harder: It would raise IntegrityError for
# a normal model, so we connect to pre_delete and set the fk to a known
# value.
replacement_r = R.objects.create()
def check_do_nothing(sender, **kwargs):
obj = kwargs["instance"]
obj.donothing_set.update(donothing=replacement_r)
models.signals.pre_delete.connect(check_do_nothing)
a = create_a("do_nothing")
a.donothing.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(replacement_r, a.donothing)
models.signals.pre_delete.disconnect(check_do_nothing)
def test_do_nothing_qscount(self):
"""
A models.DO_NOTHING relation doesn't trigger a query.
"""
b = Base.objects.create()
with self.assertNumQueries(1):
# RelToBase should not be queried.
b.delete()
self.assertEqual(Base.objects.count(), 0)
def test_inheritance_cascade_up(self):
child = RChild.objects.create()
child.delete()
self.assertFalse(R.objects.filter(pk=child.pk).exists())
def test_inheritance_cascade_down(self):
child = RChild.objects.create()
parent = child.r_ptr
parent.delete()
self.assertFalse(RChild.objects.filter(pk=child.pk).exists())
def test_cascade_from_child(self):
a = create_a("child")
a.child.delete()
self.assertFalse(A.objects.filter(name="child").exists())
self.assertFalse(R.objects.filter(pk=a.child_id).exists())
def test_cascade_from_parent(self):
a = create_a("child")
R.objects.get(pk=a.child_id).delete()
self.assertFalse(A.objects.filter(name="child").exists())
self.assertFalse(RChild.objects.filter(pk=a.child_id).exists())
def test_setnull_from_child(self):
a = create_a("child_setnull")
a.child_setnull.delete()
self.assertFalse(R.objects.filter(pk=a.child_setnull_id).exists())
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.child_setnull)
def test_setnull_from_parent(self):
a = create_a("child_setnull")
R.objects.get(pk=a.child_setnull_id).delete()
self.assertFalse(RChild.objects.filter(pk=a.child_setnull_id).exists())
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.child_setnull)
def test_o2o_setnull(self):
a = create_a("o2o_setnull")
a.o2o_setnull.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.o2o_setnull)
def test_restrict(self):
a = create_a("restrict")
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through restricted foreign keys: 'A.restrict'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
a.restrict.delete()
self.assertEqual(cm.exception.restricted_objects, {a})
def test_restrict_multiple(self):
a = create_a("restrict")
b3 = B3.objects.create(restrict=a.restrict)
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through restricted foreign keys: 'A.restrict', "
"'B3.restrict'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
a.restrict.delete()
self.assertEqual(cm.exception.restricted_objects, {a, b3})
def test_restrict_path_cascade_indirect(self):
a = create_a("restrict")
a.restrict.p = P.objects.create()
a.restrict.save()
msg = (
"Cannot delete some instances of model 'P' because they are "
"referenced through restricted foreign keys: 'A.restrict'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
a.restrict.p.delete()
self.assertEqual(cm.exception.restricted_objects, {a})
# Object referenced also with CASCADE relationship can be deleted.
a.cascade.p = a.restrict.p
a.cascade.save()
a.restrict.p.delete()
self.assertFalse(A.objects.filter(name="restrict").exists())
self.assertFalse(R.objects.filter(pk=a.restrict_id).exists())
def test_restrict_path_cascade_direct(self):
a = create_a("restrict")
a.restrict.p = P.objects.create()
a.restrict.save()
a.cascade_p = a.restrict.p
a.save()
a.restrict.p.delete()
self.assertFalse(A.objects.filter(name="restrict").exists())
self.assertFalse(R.objects.filter(pk=a.restrict_id).exists())
def test_restrict_path_cascade_indirect_diamond(self):
delete_top = DeleteTop.objects.create()
b1 = B1.objects.create(delete_top=delete_top)
b2 = B2.objects.create(delete_top=delete_top)
delete_bottom = DeleteBottom.objects.create(b1=b1, b2=b2)
msg = (
"Cannot delete some instances of model 'B1' because they are "
"referenced through restricted foreign keys: 'DeleteBottom.b1'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
b1.delete()
self.assertEqual(cm.exception.restricted_objects, {delete_bottom})
self.assertTrue(DeleteTop.objects.exists())
self.assertTrue(B1.objects.exists())
self.assertTrue(B2.objects.exists())
self.assertTrue(DeleteBottom.objects.exists())
# Object referenced also with CASCADE relationship can be deleted.
delete_top.delete()
self.assertFalse(DeleteTop.objects.exists())
self.assertFalse(B1.objects.exists())
self.assertFalse(B2.objects.exists())
self.assertFalse(DeleteBottom.objects.exists())
def test_restrict_gfk_no_fast_delete(self):
delete_top = DeleteTop.objects.create()
generic_b1 = GenericB1.objects.create(generic_delete_top=delete_top)
generic_b2 = GenericB2.objects.create(generic_delete_top=delete_top)
generic_delete_bottom = GenericDeleteBottom.objects.create(
generic_b1=generic_b1,
generic_b2=generic_b2,
)
msg = (
"Cannot delete some instances of model 'GenericB1' because they "
"are referenced through restricted foreign keys: "
"'GenericDeleteBottom.generic_b1'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
generic_b1.delete()
self.assertEqual(cm.exception.restricted_objects, {generic_delete_bottom})
self.assertTrue(DeleteTop.objects.exists())
self.assertTrue(GenericB1.objects.exists())
self.assertTrue(GenericB2.objects.exists())
self.assertTrue(GenericDeleteBottom.objects.exists())
# Object referenced also with CASCADE relationship can be deleted.
delete_top.delete()
self.assertFalse(DeleteTop.objects.exists())
self.assertFalse(GenericB1.objects.exists())
self.assertFalse(GenericB2.objects.exists())
self.assertFalse(GenericDeleteBottom.objects.exists())
class DeletionTests(TestCase):
def test_sliced_queryset(self):
msg = "Cannot use 'limit' or 'offset' with delete()."
with self.assertRaisesMessage(TypeError, msg):
M.objects.all()[0:5].delete()
def test_pk_none(self):
m = M()
msg = "M object can't be deleted because its id attribute is set to None."
with self.assertRaisesMessage(ValueError, msg):
m.delete()
def test_m2m(self):
m = M.objects.create()
r = R.objects.create()
MR.objects.create(m=m, r=r)
r.delete()
self.assertFalse(MR.objects.exists())
r = R.objects.create()
MR.objects.create(m=m, r=r)
m.delete()
self.assertFalse(MR.objects.exists())
m = M.objects.create()
r = R.objects.create()
m.m2m.add(r)
r.delete()
through = M._meta.get_field("m2m").remote_field.through
self.assertFalse(through.objects.exists())
r = R.objects.create()
m.m2m.add(r)
m.delete()
self.assertFalse(through.objects.exists())
m = M.objects.create()
r = R.objects.create()
MRNull.objects.create(m=m, r=r)
r.delete()
        self.assertTrue(MRNull.objects.exists())
self.assertFalse(m.m2m_through_null.exists())
def test_bulk(self):
s = S.objects.create(r=R.objects.create())
for i in range(2 * GET_ITERATOR_CHUNK_SIZE):
T.objects.create(s=s)
# 1 (select related `T` instances)
# + 1 (select related `U` instances)
# + 2 (delete `T` instances in batches)
# + 1 (delete `s`)
self.assertNumQueries(5, s.delete)
self.assertFalse(S.objects.exists())
def test_instance_update(self):
deleted = []
related_setnull_sets = []
def pre_delete(sender, **kwargs):
obj = kwargs["instance"]
deleted.append(obj)
if isinstance(obj, R):
related_setnull_sets.append([a.pk for a in obj.setnull_set.all()])
models.signals.pre_delete.connect(pre_delete)
a = create_a("update_setnull")
a.setnull.delete()
a = create_a("update_cascade")
a.cascade.delete()
for obj in deleted:
self.assertIsNone(obj.pk)
for pk_list in related_setnull_sets:
for a in A.objects.filter(id__in=pk_list):
self.assertIsNone(a.setnull)
models.signals.pre_delete.disconnect(pre_delete)
def test_deletion_order(self):
pre_delete_order = []
post_delete_order = []
        def log_post_delete(sender, **kwargs):
            post_delete_order.append((sender, kwargs["instance"].pk))
        def log_pre_delete(sender, **kwargs):
            pre_delete_order.append((sender, kwargs["instance"].pk))
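        # pre_delete fires in collection order (ascending pk); post_delete
        # fires in deletion order, which reverses each model's instances.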
models.signals.post_delete.connect(log_post_delete)
models.signals.pre_delete.connect(log_pre_delete)
r = R.objects.create()
s1 = S.objects.create(r=r)
s2 = S.objects.create(r=r)
t1 = T.objects.create(s=s1)
t2 = T.objects.create(s=s2)
rchild = RChild.objects.create(r_ptr=r)
r_pk = r.pk
r.delete()
        self.assertEqual(
            pre_delete_order,
            [
                (T, t1.pk),
                (T, t2.pk),
                (RChild, rchild.pk),
                (S, s1.pk),
                (S, s2.pk),
                (R, r_pk),
            ],
        )
        self.assertEqual(
            post_delete_order,
            [
                (T, t2.pk),
                (T, t1.pk),
                (RChild, rchild.pk),
                (S, s2.pk),
                (S, s1.pk),
                (R, r_pk),
            ],
        )
models.signals.post_delete.disconnect(log_post_delete)
models.signals.pre_delete.disconnect(log_pre_delete)
def test_relational_post_delete_signals_happen_before_parent_object(self):
deletions = []
def log_post_delete(instance, **kwargs):
self.assertTrue(R.objects.filter(pk=instance.r_id))
self.assertIs(type(instance), S)
deletions.append(instance.id)
r = R.objects.create()
s = S.objects.create(r=r)
s_id = s.pk
models.signals.post_delete.connect(log_post_delete, sender=S)
try:
r.delete()
finally:
models.signals.post_delete.disconnect(log_post_delete)
self.assertEqual(len(deletions), 1)
self.assertEqual(deletions[0], s_id)
@skipUnlessDBFeature("can_defer_constraint_checks")
def test_can_defer_constraint_checks(self):
u = User.objects.create(avatar=Avatar.objects.create())
a = Avatar.objects.get(pk=u.avatar_id)
# 1 query to find the users for the avatar.
# 1 query to delete the user
# 1 query to delete the avatar
# The important thing is that when we can defer constraint checks there
# is no need to do an UPDATE on User.avatar to null it out.
# Attach a signal to make sure we will not do fast_deletes.
calls = []
def noop(*args, **kwargs):
calls.append("")
models.signals.post_delete.connect(noop, sender=User)
self.assertNumQueries(3, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
self.assertEqual(len(calls), 1)
models.signals.post_delete.disconnect(noop, sender=User)
@skipIfDBFeature("can_defer_constraint_checks")
def test_cannot_defer_constraint_checks(self):
u = User.objects.create(avatar=Avatar.objects.create())
# Attach a signal to make sure we will not do fast_deletes.
calls = []
def noop(*args, **kwargs):
calls.append("")
models.signals.post_delete.connect(noop, sender=User)
a = Avatar.objects.get(pk=u.avatar_id)
# The below doesn't make sense... Why do we need to null out
# user.avatar if we are going to delete the user immediately after it,
# and there are no more cascades.
# 1 query to find the users for the avatar.
# 1 query to delete the user
# 1 query to null out user.avatar, because we can't defer the constraint
# 1 query to delete the avatar
self.assertNumQueries(4, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
self.assertEqual(len(calls), 1)
models.signals.post_delete.disconnect(noop, sender=User)
def test_hidden_related(self):
r = R.objects.create()
h = HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h)
r.delete()
self.assertEqual(HiddenUserProfile.objects.count(), 0)
def test_large_delete(self):
TEST_SIZE = 2000
objs = [Avatar() for i in range(0, TEST_SIZE)]
Avatar.objects.bulk_create(objs)
# Calculate the number of queries needed.
batch_size = connection.ops.bulk_batch_size(["pk"], objs)
# The related fetches are done in batches.
batches = ceil(len(objs) / batch_size)
# One query for Avatar.objects.all() and then one related fast delete for
# each batch.
fetches_to_mem = 1 + batches
# The Avatar objects are going to be deleted in batches of
# GET_ITERATOR_CHUNK_SIZE.
queries = fetches_to_mem + TEST_SIZE // GET_ITERATOR_CHUNK_SIZE
self.assertNumQueries(queries, Avatar.objects.all().delete)
self.assertFalse(Avatar.objects.exists())
def test_large_delete_related(self):
TEST_SIZE = 2000
s = S.objects.create(r=R.objects.create())
for i in range(TEST_SIZE):
T.objects.create(s=s)
batch_size = max(connection.ops.bulk_batch_size(["pk"], range(TEST_SIZE)), 1)
# TEST_SIZE / batch_size (select related `T` instances)
# + 1 (select related `U` instances)
# + TEST_SIZE / GET_ITERATOR_CHUNK_SIZE (delete `T` instances in batches)
# + 1 (delete `s`)
expected_num_queries = ceil(TEST_SIZE / batch_size)
expected_num_queries += ceil(TEST_SIZE / GET_ITERATOR_CHUNK_SIZE) + 2
self.assertNumQueries(expected_num_queries, s.delete)
self.assertFalse(S.objects.exists())
self.assertFalse(T.objects.exists())
def test_delete_with_keeping_parents(self):
child = RChild.objects.create()
parent_id = child.r_ptr_id
child.delete(keep_parents=True)
self.assertFalse(RChild.objects.filter(id=child.id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
def test_delete_with_keeping_parents_relationships(self):
child = RChild.objects.create()
parent_id = child.r_ptr_id
parent_referent_id = S.objects.create(r=child.r_ptr).pk
child.delete(keep_parents=True)
self.assertFalse(RChild.objects.filter(id=child.id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
self.assertTrue(S.objects.filter(pk=parent_referent_id).exists())
childchild = RChildChild.objects.create()
parent_id = childchild.rchild_ptr.r_ptr_id
child_id = childchild.rchild_ptr_id
parent_referent_id = S.objects.create(r=childchild.rchild_ptr.r_ptr).pk
childchild.delete(keep_parents=True)
self.assertFalse(RChildChild.objects.filter(id=childchild.id).exists())
self.assertTrue(RChild.objects.filter(id=child_id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
self.assertTrue(S.objects.filter(pk=parent_referent_id).exists())
def test_queryset_delete_returns_num_rows(self):
"""
QuerySet.delete() should return the number of deleted rows and a
dictionary with the number of deletions for each object type.
"""
Avatar.objects.bulk_create(
[Avatar(desc="a"), Avatar(desc="b"), Avatar(desc="c")]
)
avatars_count = Avatar.objects.count()
deleted, rows_count = Avatar.objects.all().delete()
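        # delete() returns a 2-tuple, e.g. (3, {"delete.Avatar": 3}) here:
        # the total number of rows deleted and a per-model breakdown.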
self.assertEqual(deleted, avatars_count)
        # A more complex example with multiple object types.
r = R.objects.create()
h1 = HiddenUser.objects.create(r=r)
HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h1)
existed_objs = {
R._meta.label: R.objects.count(),
HiddenUser._meta.label: HiddenUser.objects.count(),
HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(),
}
deleted, deleted_objs = R.objects.all().delete()
self.assertCountEqual(deleted_objs.keys(), existed_objs.keys())
for k, v in existed_objs.items():
self.assertEqual(deleted_objs[k], v)
def test_model_delete_returns_num_rows(self):
"""
Model.delete() should return the number of deleted rows and a
dictionary with the number of deletions for each object type.
"""
r = R.objects.create()
h1 = HiddenUser.objects.create(r=r)
h2 = HiddenUser.objects.create(r=r)
HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h1)
HiddenUserProfile.objects.create(user=h2)
m1 = M.objects.create()
m2 = M.objects.create()
MR.objects.create(r=r, m=m1)
r.m_set.add(m1)
r.m_set.add(m2)
r.save()
existed_objs = {
R._meta.label: R.objects.count(),
HiddenUser._meta.label: HiddenUser.objects.count(),
MR._meta.label: MR.objects.count(),
HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(),
M.m2m.through._meta.label: M.m2m.through.objects.count(),
}
deleted, deleted_objs = r.delete()
self.assertEqual(deleted, sum(existed_objs.values()))
self.assertCountEqual(deleted_objs.keys(), existed_objs.keys())
for k, v in existed_objs.items():
self.assertEqual(deleted_objs[k], v)
def test_proxied_model_duplicate_queries(self):
"""
#25685 - Deleting instances of a model with existing proxy
classes should not issue multiple queries during cascade
deletion of referring models.
"""
avatar = Avatar.objects.create()
# One query for the Avatar table and a second for the User one.
with self.assertNumQueries(2):
avatar.delete()
def test_only_referenced_fields_selected(self):
"""
        Only referenced fields are selected during the cascade deletion
        SELECT unless deletion signals are connected.
"""
origin = Origin.objects.create()
expected_sql = str(
Referrer.objects.only(
# Both fields are referenced by SecondReferrer.
"id",
"unique_field",
)
.filter(origin__in=[origin])
.query
)
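        # The collector restricts its cascade SELECT to the referenced
        # fields, so it should match the .only() queryset built above.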
with self.assertNumQueries(2) as ctx:
origin.delete()
self.assertEqual(ctx.captured_queries[0]["sql"], expected_sql)
def receiver(instance, **kwargs):
pass
# All fields are selected if deletion signals are connected.
for signal_name in ("pre_delete", "post_delete"):
with self.subTest(signal=signal_name):
origin = Origin.objects.create()
signal = getattr(models.signals, signal_name)
signal.connect(receiver, sender=Referrer)
with self.assertNumQueries(2) as ctx:
origin.delete()
self.assertIn(
connection.ops.quote_name("large_field"),
ctx.captured_queries[0]["sql"],
)
signal.disconnect(receiver, sender=Referrer)
class FastDeleteTests(TestCase):
def test_fast_delete_all(self):
with self.assertNumQueries(1) as ctx:
User.objects.all().delete()
sql = ctx.captured_queries[0]["sql"]
        # No subquery is used when performing a full delete.
self.assertNotIn("SELECT", sql)
def test_fast_delete_fk(self):
u = User.objects.create(avatar=Avatar.objects.create())
a = Avatar.objects.get(pk=u.avatar_id)
# 1 query to fast-delete the user
# 1 query to delete the avatar
self.assertNumQueries(2, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
def test_fast_delete_m2m(self):
t = M2MTo.objects.create()
f = M2MFrom.objects.create()
f.m2m.add(t)
# 1 to delete f, 1 to fast-delete m2m for f
self.assertNumQueries(2, f.delete)
def test_fast_delete_revm2m(self):
t = M2MTo.objects.create()
f = M2MFrom.objects.create()
f.m2m.add(t)
# 1 to delete t, 1 to fast-delete t's m_set
        self.assertNumQueries(2, t.delete)
def test_fast_delete_qs(self):
u1 = User.objects.create()
u2 = User.objects.create()
self.assertNumQueries(1, User.objects.filter(pk=u1.pk).delete)
self.assertEqual(User.objects.count(), 1)
self.assertTrue(User.objects.filter(pk=u2.pk).exists())
def test_fast_delete_instance_set_pk_none(self):
u = User.objects.create()
# User can be fast-deleted.
collector = Collector(using="default")
self.assertTrue(collector.can_fast_delete(u))
u.delete()
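        # Even on the fast-delete path, Model.delete() clears the instance's
        # primary key afterwards.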
self.assertIsNone(u.pk)
def test_fast_delete_joined_qs(self):
a = Avatar.objects.create(desc="a")
User.objects.create(avatar=a)
u2 = User.objects.create()
self.assertNumQueries(1, User.objects.filter(avatar__desc="a").delete)
self.assertEqual(User.objects.count(), 1)
self.assertTrue(User.objects.filter(pk=u2.pk).exists())
def test_fast_delete_inheritance(self):
c = Child.objects.create()
p = Parent.objects.create()
# 1 for self, 1 for parent
self.assertNumQueries(2, c.delete)
self.assertFalse(Child.objects.exists())
self.assertEqual(Parent.objects.count(), 1)
self.assertEqual(Parent.objects.filter(pk=p.pk).count(), 1)
# 1 for self delete, 1 for fast delete of empty "child" qs.
self.assertNumQueries(2, p.delete)
self.assertFalse(Parent.objects.exists())
        # 1 for self delete, 1 for fast delete of the existing child.
c = Child.objects.create()
p = c.parent_ptr
self.assertNumQueries(2, p.delete)
self.assertFalse(Parent.objects.exists())
self.assertFalse(Child.objects.exists())
def test_fast_delete_large_batch(self):
User.objects.bulk_create(User() for i in range(0, 2000))
# No problems here - we aren't going to cascade, so we will fast
# delete the objects in a single query.
self.assertNumQueries(1, User.objects.all().delete)
a = Avatar.objects.create(desc="a")
User.objects.bulk_create(User(avatar=a) for i in range(0, 2000))
# We don't hit parameter amount limits for a, so just one query for
# that + fast delete of the related objs.
self.assertNumQueries(2, a.delete)
self.assertEqual(User.objects.count(), 0)
def test_fast_delete_empty_no_update_can_self_select(self):
"""
Fast deleting when DatabaseFeatures.update_can_self_select = False
works even if the specified filter doesn't match any row (#25932).
"""
with self.assertNumQueries(1):
self.assertEqual(
User.objects.filter(avatar__desc="missing").delete(),
(0, {}),
)
def test_fast_delete_combined_relationships(self):
# The cascading fast-delete of SecondReferrer should be combined
# in a single DELETE WHERE referrer_id OR unique_field.
origin = Origin.objects.create()
        referrer = Referrer.objects.create(origin=origin, unique_field=42)
        with self.assertNumQueries(2):
            referrer.delete()
def test_fast_delete_aggregation(self):
        # Fast-deleting when filtering against an aggregation results in
        # a single query containing a subquery.
Base.objects.create()
with self.assertNumQueries(1):
self.assertEqual(
Base.objects.annotate(
rels_count=models.Count("rels"),
)
.filter(rels_count=0)
.delete(),
(1, {"delete.Base": 1}),
)
self.assertIs(Base.objects.exists(), False)
def test_fast_delete_full_match(self):
avatar = Avatar.objects.create(desc="bar")
User.objects.create(avatar=avatar)
with self.assertNumQueries(1):
User.objects.filter(~Q(pk__in=[]) | Q(avatar__desc="foo")).delete()
self.assertFalse(User.objects.exists())
|
b3a6dd6f3eb59b4a426f1fed55124b568bb8638e2decb3b0448087536aeb4dad | from unittest import mock
from django.core.checks import Error
from django.core.checks import Warning as DjangoWarning
from django.db import connection, models
from django.test.testcases import SimpleTestCase
from django.test.utils import isolate_apps, modify_settings, override_settings
@isolate_apps("invalid_models_tests")
class RelativeFieldTests(SimpleTestCase):
def test_valid_foreign_key_without_accessor(self):
class Target(models.Model):
# There would be a clash if Model.field installed an accessor.
model = models.IntegerField()
class Model(models.Model):
field = models.ForeignKey(Target, models.CASCADE, related_name="+")
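            # related_name="+" disables the reverse accessor and reverse
            # query name, so nothing is installed on Target to clash with
            # its "model" field.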
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
def test_foreign_key_to_missing_model(self):
# Model names are resolved when a model is being created, so we cannot
# test relative fields in isolation and we need to attach them to a
# model.
class Model(models.Model):
foreign_key = models.ForeignKey("Rel1", models.CASCADE)
field = Model._meta.get_field("foreign_key")
self.assertEqual(
field.check(),
[
Error(
"Field defines a relation with model 'Rel1', "
"which is either not installed, or is abstract.",
obj=field,
id="fields.E300",
),
],
)
@isolate_apps("invalid_models_tests")
def test_foreign_key_to_isolate_apps_model(self):
"""
#25723 - Referenced model registration lookup should be run against the
field's model registry.
"""
class OtherModel(models.Model):
pass
class Model(models.Model):
foreign_key = models.ForeignKey("OtherModel", models.CASCADE)
field = Model._meta.get_field("foreign_key")
self.assertEqual(field.check(from_model=Model), [])
def test_many_to_many_to_missing_model(self):
class Model(models.Model):
m2m = models.ManyToManyField("Rel2")
field = Model._meta.get_field("m2m")
self.assertEqual(
field.check(from_model=Model),
[
Error(
"Field defines a relation with model 'Rel2', "
"which is either not installed, or is abstract.",
obj=field,
id="fields.E300",
),
],
)
@isolate_apps("invalid_models_tests")
def test_many_to_many_to_isolate_apps_model(self):
"""
#25723 - Referenced model registration lookup should be run against the
field's model registry.
"""
class OtherModel(models.Model):
pass
class Model(models.Model):
m2m = models.ManyToManyField("OtherModel")
field = Model._meta.get_field("m2m")
self.assertEqual(field.check(from_model=Model), [])
def test_many_to_many_with_useless_options(self):
class Model(models.Model):
name = models.CharField(max_length=20)
class ModelM2M(models.Model):
m2m = models.ManyToManyField(
Model, null=True, validators=[lambda x: x], db_comment="Column comment"
)
field = ModelM2M._meta.get_field("m2m")
self.assertEqual(
ModelM2M.check(),
[
DjangoWarning(
"null has no effect on ManyToManyField.",
obj=field,
id="fields.W340",
),
DjangoWarning(
"ManyToManyField does not support validators.",
obj=field,
id="fields.W341",
),
DjangoWarning(
"db_comment has no effect on ManyToManyField.",
obj=field,
id="fields.W346",
),
],
)
def test_many_to_many_with_useless_related_name(self):
class ModelM2M(models.Model):
m2m = models.ManyToManyField("self", related_name="children")
field = ModelM2M._meta.get_field("m2m")
self.assertEqual(
ModelM2M.check(),
[
DjangoWarning(
"related_name has no effect on ManyToManyField with "
'a symmetrical relationship, e.g. to "self".',
obj=field,
id="fields.W345",
),
],
)
def test_ambiguous_relationship_model_from(self):
class Person(models.Model):
pass
class Group(models.Model):
field = models.ManyToManyField("Person", through="AmbiguousRelationship")
class AmbiguousRelationship(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
first_group = models.ForeignKey(Group, models.CASCADE, related_name="first")
second_group = models.ForeignKey(
Group, models.CASCADE, related_name="second"
)
field = Group._meta.get_field("field")
self.assertEqual(
field.check(from_model=Group),
[
Error(
"The model is used as an intermediate model by "
"'invalid_models_tests.Group.field', but it has more than one "
"foreign key from 'Group', which is ambiguous. You must "
"specify which foreign key Django should use via the "
"through_fields keyword argument.",
hint=(
"If you want to create a recursive relationship, use "
'ManyToManyField("self", through="AmbiguousRelationship").'
),
obj=field,
id="fields.E334",
),
],
)
def test_ambiguous_relationship_model_to(self):
class Person(models.Model):
pass
class Group(models.Model):
field = models.ManyToManyField(
"Person", through="AmbiguousRelationship", related_name="tertiary"
)
class AmbiguousRelationship(models.Model):
            # Too many foreign keys to Person.
first_person = models.ForeignKey(
Person, models.CASCADE, related_name="first"
)
second_person = models.ForeignKey(
Person, models.CASCADE, related_name="second"
)
second_model = models.ForeignKey(Group, models.CASCADE)
field = Group._meta.get_field("field")
self.assertEqual(
field.check(from_model=Group),
[
Error(
"The model is used as an intermediate model by "
"'invalid_models_tests.Group.field', but it has more than one "
"foreign key to 'Person', which is ambiguous. You must specify "
"which foreign key Django should use via the through_fields "
"keyword argument.",
hint=(
"If you want to create a recursive relationship, use "
'ManyToManyField("self", through="AmbiguousRelationship").'
),
obj=field,
id="fields.E335",
),
],
)
def test_relationship_model_with_foreign_key_to_wrong_model(self):
class WrongModel(models.Model):
pass
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField("Person", through="InvalidRelationship")
class InvalidRelationship(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
wrong_foreign_key = models.ForeignKey(WrongModel, models.CASCADE)
            # The last foreign key should point to the Group model.
field = Group._meta.get_field("members")
self.assertEqual(
field.check(from_model=Group),
[
Error(
"The model is used as an intermediate model by "
"'invalid_models_tests.Group.members', but it does not "
"have a foreign key to 'Group' or 'Person'.",
obj=InvalidRelationship,
id="fields.E336",
),
],
)
def test_relationship_model_missing_foreign_key(self):
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField("Person", through="InvalidRelationship")
class InvalidRelationship(models.Model):
group = models.ForeignKey(Group, models.CASCADE)
# No foreign key to Person
field = Group._meta.get_field("members")
self.assertEqual(
field.check(from_model=Group),
[
Error(
"The model is used as an intermediate model by "
"'invalid_models_tests.Group.members', but it does not have "
"a foreign key to 'Group' or 'Person'.",
obj=InvalidRelationship,
id="fields.E336",
),
],
)
def test_missing_relationship_model(self):
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField("Person", through="MissingM2MModel")
field = Group._meta.get_field("members")
self.assertEqual(
field.check(from_model=Group),
[
Error(
"Field specifies a many-to-many relation through model "
"'MissingM2MModel', which has not been installed.",
obj=field,
id="fields.E331",
),
],
)
def test_missing_relationship_model_on_model_check(self):
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField("Person", through="MissingM2MModel")
self.assertEqual(
Group.check(),
[
Error(
"Field specifies a many-to-many relation through model "
"'MissingM2MModel', which has not been installed.",
obj=Group._meta.get_field("members"),
id="fields.E331",
),
],
)
@isolate_apps("invalid_models_tests")
def test_many_to_many_through_isolate_apps_model(self):
"""
#25723 - Through model registration lookup should be run against the
field's model registry.
"""
class GroupMember(models.Model):
person = models.ForeignKey("Person", models.CASCADE)
group = models.ForeignKey("Group", models.CASCADE)
class Person(models.Model):
pass
class Group(models.Model):
members = models.ManyToManyField("Person", through="GroupMember")
field = Group._meta.get_field("members")
self.assertEqual(field.check(from_model=Group), [])
def test_too_many_foreign_keys_in_self_referential_model(self):
class Person(models.Model):
friends = models.ManyToManyField(
"self", through="InvalidRelationship", symmetrical=False
)
class InvalidRelationship(models.Model):
first = models.ForeignKey(
Person, models.CASCADE, related_name="rel_from_set_2"
)
second = models.ForeignKey(
Person, models.CASCADE, related_name="rel_to_set_2"
)
third = models.ForeignKey(
Person, models.CASCADE, related_name="too_many_by_far"
)
field = Person._meta.get_field("friends")
self.assertEqual(
field.check(from_model=Person),
[
Error(
"The model is used as an intermediate model by "
"'invalid_models_tests.Person.friends', but it has more than two "
"foreign keys to 'Person', which is ambiguous. You must specify "
"which two foreign keys Django should use via the through_fields "
"keyword argument.",
hint=(
"Use through_fields to specify which two foreign keys Django "
"should use."
),
obj=InvalidRelationship,
id="fields.E333",
),
],
)
def test_foreign_key_to_abstract_model(self):
class AbstractModel(models.Model):
class Meta:
abstract = True
class Model(models.Model):
rel_string_foreign_key = models.ForeignKey("AbstractModel", models.CASCADE)
rel_class_foreign_key = models.ForeignKey(AbstractModel, models.CASCADE)
fields = [
Model._meta.get_field("rel_string_foreign_key"),
Model._meta.get_field("rel_class_foreign_key"),
]
expected_error = Error(
"Field defines a relation with model 'AbstractModel', "
"which is either not installed, or is abstract.",
id="fields.E300",
)
for field in fields:
expected_error.obj = field
self.assertEqual(field.check(), [expected_error])
def test_m2m_to_abstract_model(self):
class AbstractModel(models.Model):
class Meta:
abstract = True
class Model(models.Model):
rel_string_m2m = models.ManyToManyField("AbstractModel")
rel_class_m2m = models.ManyToManyField(AbstractModel)
fields = [
Model._meta.get_field("rel_string_m2m"),
Model._meta.get_field("rel_class_m2m"),
]
expected_error = Error(
"Field defines a relation with model 'AbstractModel', "
"which is either not installed, or is abstract.",
id="fields.E300",
)
for field in fields:
expected_error.obj = field
self.assertEqual(field.check(from_model=Model), [expected_error])
def test_unique_m2m(self):
class Person(models.Model):
name = models.CharField(max_length=5)
class Group(models.Model):
members = models.ManyToManyField("Person", unique=True)
field = Group._meta.get_field("members")
self.assertEqual(
field.check(from_model=Group),
[
Error(
"ManyToManyFields cannot be unique.",
obj=field,
id="fields.E330",
),
],
)
def test_foreign_key_to_non_unique_field(self):
class Target(models.Model):
bad = models.IntegerField() # No unique=True
class Model(models.Model):
foreign_key = models.ForeignKey("Target", models.CASCADE, to_field="bad")
field = Model._meta.get_field("foreign_key")
self.assertEqual(
field.check(),
[
Error(
"'Target.bad' must be unique because it is referenced by a foreign "
"key.",
hint=(
"Add unique=True to this field or add a UniqueConstraint "
"(without condition) in the model Meta.constraints."
),
obj=field,
id="fields.E311",
),
],
)
def test_foreign_key_to_non_unique_field_under_explicit_model(self):
class Target(models.Model):
bad = models.IntegerField()
class Model(models.Model):
field = models.ForeignKey(Target, models.CASCADE, to_field="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'Target.bad' must be unique because it is referenced by a foreign "
"key.",
hint=(
"Add unique=True to this field or add a UniqueConstraint "
"(without condition) in the model Meta.constraints."
),
obj=field,
id="fields.E311",
),
],
)
def test_foreign_key_to_partially_unique_field(self):
class Target(models.Model):
source = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["source"],
name="tfktpuf_partial_unique",
condition=models.Q(pk__gt=2),
),
]
class Model(models.Model):
field = models.ForeignKey(Target, models.CASCADE, to_field="source")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'Target.source' must be unique because it is referenced by a "
"foreign key.",
hint=(
"Add unique=True to this field or add a UniqueConstraint "
"(without condition) in the model Meta.constraints."
),
obj=field,
id="fields.E311",
),
],
)
def test_foreign_key_to_unique_field_with_meta_constraint(self):
class Target(models.Model):
source = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["source"],
name="tfktufwmc_unique",
),
]
class Model(models.Model):
field = models.ForeignKey(Target, models.CASCADE, to_field="source")
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
def test_foreign_object_to_non_unique_fields(self):
class Person(models.Model):
            # Note that neither field is unique.
country_id = models.IntegerField()
city_id = models.IntegerField()
class MMembership(models.Model):
person_country_id = models.IntegerField()
person_city_id = models.IntegerField()
person = models.ForeignObject(
Person,
on_delete=models.CASCADE,
from_fields=["person_country_id", "person_city_id"],
to_fields=["country_id", "city_id"],
)
field = MMembership._meta.get_field("person")
self.assertEqual(
field.check(),
[
Error(
"No subset of the fields 'country_id', 'city_id' on model 'Person' "
"is unique.",
hint=(
"Mark a single field as unique=True or add a set of "
"fields to a unique constraint (via unique_together or a "
"UniqueConstraint (without condition) in the model "
"Meta.constraints)."
),
obj=field,
id="fields.E310",
)
],
)
def test_foreign_object_to_partially_unique_field(self):
class Person(models.Model):
country_id = models.IntegerField()
city_id = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["country_id", "city_id"],
name="tfotpuf_partial_unique",
condition=models.Q(pk__gt=2),
),
]
class MMembership(models.Model):
person_country_id = models.IntegerField()
person_city_id = models.IntegerField()
person = models.ForeignObject(
Person,
on_delete=models.CASCADE,
from_fields=["person_country_id", "person_city_id"],
to_fields=["country_id", "city_id"],
)
field = MMembership._meta.get_field("person")
self.assertEqual(
field.check(),
[
Error(
"No subset of the fields 'country_id', 'city_id' on model "
"'Person' is unique.",
hint=(
"Mark a single field as unique=True or add a set of "
"fields to a unique constraint (via unique_together or a "
"UniqueConstraint (without condition) in the model "
"Meta.constraints)."
),
obj=field,
id="fields.E310",
),
],
)
def test_foreign_object_to_unique_field_with_meta_constraint(self):
class Person(models.Model):
country_id = models.IntegerField()
city_id = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["country_id", "city_id"],
name="tfotpuf_unique",
),
]
class MMembership(models.Model):
person_country_id = models.IntegerField()
person_city_id = models.IntegerField()
person = models.ForeignObject(
Person,
on_delete=models.CASCADE,
from_fields=["person_country_id", "person_city_id"],
to_fields=["country_id", "city_id"],
)
field = MMembership._meta.get_field("person")
self.assertEqual(field.check(), [])
def test_on_delete_set_null_on_non_nullable_field(self):
class Person(models.Model):
pass
class Model(models.Model):
foreign_key = models.ForeignKey("Person", models.SET_NULL)
field = Model._meta.get_field("foreign_key")
self.assertEqual(
field.check(),
[
Error(
"Field specifies on_delete=SET_NULL, but cannot be null.",
hint=(
"Set null=True argument on the field, or change the on_delete "
"rule."
),
obj=field,
id="fields.E320",
),
],
)
def test_on_delete_set_default_without_default_value(self):
class Person(models.Model):
pass
class Model(models.Model):
foreign_key = models.ForeignKey("Person", models.SET_DEFAULT)
field = Model._meta.get_field("foreign_key")
self.assertEqual(
field.check(),
[
Error(
"Field specifies on_delete=SET_DEFAULT, but has no default value.",
hint="Set a default value, or change the on_delete rule.",
obj=field,
id="fields.E321",
),
],
)
def test_nullable_primary_key(self):
class Model(models.Model):
field = models.IntegerField(primary_key=True, null=True)
field = Model._meta.get_field("field")
with mock.patch.object(
connection.features, "interprets_empty_strings_as_nulls", False
):
results = field.check()
self.assertEqual(
results,
[
Error(
"Primary keys must not have null=True.",
hint=(
"Set null=False on the field, or remove primary_key=True "
"argument."
),
obj=field,
id="fields.E007",
),
],
)
def test_not_swapped_model(self):
class SwappableModel(models.Model):
# A model that can be, but isn't swapped out. References to this
# model should *not* raise any validation error.
class Meta:
swappable = "TEST_SWAPPABLE_MODEL"
class Model(models.Model):
explicit_fk = models.ForeignKey(
SwappableModel,
models.CASCADE,
related_name="explicit_fk",
)
implicit_fk = models.ForeignKey(
"invalid_models_tests.SwappableModel",
models.CASCADE,
related_name="implicit_fk",
)
explicit_m2m = models.ManyToManyField(
SwappableModel, related_name="explicit_m2m"
)
implicit_m2m = models.ManyToManyField(
"invalid_models_tests.SwappableModel",
related_name="implicit_m2m",
)
explicit_fk = Model._meta.get_field("explicit_fk")
self.assertEqual(explicit_fk.check(), [])
implicit_fk = Model._meta.get_field("implicit_fk")
self.assertEqual(implicit_fk.check(), [])
explicit_m2m = Model._meta.get_field("explicit_m2m")
self.assertEqual(explicit_m2m.check(from_model=Model), [])
implicit_m2m = Model._meta.get_field("implicit_m2m")
self.assertEqual(implicit_m2m.check(from_model=Model), [])
@override_settings(TEST_SWAPPED_MODEL="invalid_models_tests.Replacement")
def test_referencing_to_swapped_model(self):
class Replacement(models.Model):
pass
class SwappedModel(models.Model):
class Meta:
swappable = "TEST_SWAPPED_MODEL"
class Model(models.Model):
explicit_fk = models.ForeignKey(
SwappedModel,
models.CASCADE,
related_name="explicit_fk",
)
implicit_fk = models.ForeignKey(
"invalid_models_tests.SwappedModel",
models.CASCADE,
related_name="implicit_fk",
)
explicit_m2m = models.ManyToManyField(
SwappedModel, related_name="explicit_m2m"
)
implicit_m2m = models.ManyToManyField(
"invalid_models_tests.SwappedModel",
related_name="implicit_m2m",
)
fields = [
Model._meta.get_field("explicit_fk"),
Model._meta.get_field("implicit_fk"),
Model._meta.get_field("explicit_m2m"),
Model._meta.get_field("implicit_m2m"),
]
expected_error = Error(
(
"Field defines a relation with the model "
"'invalid_models_tests.SwappedModel', which has been swapped out."
),
hint="Update the relation to point at 'settings.TEST_SWAPPED_MODEL'.",
id="fields.E301",
)
for field in fields:
expected_error.obj = field
self.assertEqual(field.check(from_model=Model), [expected_error])
def test_related_field_has_invalid_related_name(self):
digit = 0
illegal_non_alphanumeric = "!"
whitespace = "\t"
invalid_related_names = [
"%s_begins_with_digit" % digit,
"%s_begins_with_illegal_non_alphanumeric" % illegal_non_alphanumeric,
"%s_begins_with_whitespace" % whitespace,
"contains_%s_illegal_non_alphanumeric" % illegal_non_alphanumeric,
"contains_%s_whitespace" % whitespace,
"ends_with_with_illegal_non_alphanumeric_%s" % illegal_non_alphanumeric,
"ends_with_whitespace_%s" % whitespace,
"with", # a Python keyword
"related_name\n",
"",
",", # non-ASCII
]
class Parent(models.Model):
pass
for invalid_related_name in invalid_related_names:
Child = type(
"Child%s" % invalid_related_name,
(models.Model,),
{
"parent": models.ForeignKey(
"Parent", models.CASCADE, related_name=invalid_related_name
),
"__module__": Parent.__module__,
},
)
field = Child._meta.get_field("parent")
self.assertEqual(
Child.check(),
[
Error(
"The name '%s' is invalid related_name for field Child%s.parent"
% (invalid_related_name, invalid_related_name),
hint=(
"Related name must be a valid Python identifier or end "
"with a '+'"
),
obj=field,
id="fields.E306",
),
],
)
def test_related_field_has_valid_related_name(self):
lowercase = "a"
uppercase = "A"
digit = 0
related_names = [
"%s_starts_with_lowercase" % lowercase,
"%s_tarts_with_uppercase" % uppercase,
"_starts_with_underscore",
"contains_%s_digit" % digit,
"ends_with_plus+",
"_+",
"+",
"試",
"試驗+",
]
class Parent(models.Model):
pass
for related_name in related_names:
Child = type(
"Child%s" % related_name,
(models.Model,),
{
"parent": models.ForeignKey(
"Parent", models.CASCADE, related_name=related_name
),
"__module__": Parent.__module__,
},
)
self.assertEqual(Child.check(), [])
def test_to_fields_exist(self):
class Parent(models.Model):
pass
class Child(models.Model):
a = models.PositiveIntegerField()
b = models.PositiveIntegerField()
parent = models.ForeignObject(
Parent,
on_delete=models.SET_NULL,
from_fields=("a", "b"),
to_fields=("a", "b"),
)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(),
[
Error(
"The to_field 'a' doesn't exist on the related model "
"'invalid_models_tests.Parent'.",
obj=field,
id="fields.E312",
),
Error(
"The to_field 'b' doesn't exist on the related model "
"'invalid_models_tests.Parent'.",
obj=field,
id="fields.E312",
),
],
)
def test_to_fields_not_checked_if_related_model_doesnt_exist(self):
class Child(models.Model):
a = models.PositiveIntegerField()
b = models.PositiveIntegerField()
parent = models.ForeignObject(
"invalid_models_tests.Parent",
on_delete=models.SET_NULL,
from_fields=("a", "b"),
to_fields=("a", "b"),
)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(),
[
Error(
"Field defines a relation with model "
"'invalid_models_tests.Parent', which is either not installed, or "
"is abstract.",
id="fields.E300",
obj=field,
),
],
)
def test_invalid_related_query_name(self):
class Target(models.Model):
pass
class Model(models.Model):
first = models.ForeignKey(
Target, models.CASCADE, related_name="contains__double"
)
second = models.ForeignKey(
Target, models.CASCADE, related_query_name="ends_underscore_"
)
self.assertEqual(
Model.check(),
[
Error(
"Reverse query name 'contains__double' must not contain '__'.",
hint=(
"Add or change a related_name or related_query_name "
"argument for this field."
),
obj=Model._meta.get_field("first"),
id="fields.E309",
),
Error(
"Reverse query name 'ends_underscore_' must not end with an "
"underscore.",
hint=(
"Add or change a related_name or related_query_name "
"argument for this field."
),
obj=Model._meta.get_field("second"),
id="fields.E308",
),
],
)
@isolate_apps("invalid_models_tests")
class AccessorClashTests(SimpleTestCase):
def test_fk_to_integer(self):
self._test_accessor_clash(
target=models.IntegerField(),
relative=models.ForeignKey("Target", models.CASCADE),
)
def test_fk_to_fk(self):
self._test_accessor_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ForeignKey("Target", models.CASCADE),
)
def test_fk_to_m2m(self):
self._test_accessor_clash(
target=models.ManyToManyField("Another"),
relative=models.ForeignKey("Target", models.CASCADE),
)
def test_m2m_to_integer(self):
self._test_accessor_clash(
target=models.IntegerField(), relative=models.ManyToManyField("Target")
)
def test_m2m_to_fk(self):
self._test_accessor_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ManyToManyField("Target"),
)
def test_m2m_to_m2m(self):
self._test_accessor_clash(
target=models.ManyToManyField("Another"),
relative=models.ManyToManyField("Target"),
)
def _test_accessor_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
model_set = target
class Model(models.Model):
rel = relative
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Target.model_set' for "
"'invalid_models_tests.Model.rel' clashes with field name "
"'invalid_models_tests.Target.model_set'.",
hint=(
"Rename field 'invalid_models_tests.Target.model_set', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.rel'."
),
obj=Model._meta.get_field("rel"),
id="fields.E302",
),
],
)
def test_clash_between_accessors(self):
class Target(models.Model):
pass
class Model(models.Model):
foreign = models.ForeignKey(Target, models.CASCADE)
m2m = models.ManyToManyField(Target)
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Target.model_set' for "
"'invalid_models_tests.Model.foreign' clashes with reverse "
"accessor for 'invalid_models_tests.Model.m2m'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.foreign' or "
"'invalid_models_tests.Model.m2m'."
),
obj=Model._meta.get_field("foreign"),
id="fields.E304",
),
Error(
"Reverse accessor 'Target.model_set' for "
"'invalid_models_tests.Model.m2m' clashes with reverse "
"accessor for 'invalid_models_tests.Model.foreign'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.m2m' or "
"'invalid_models_tests.Model.foreign'."
),
obj=Model._meta.get_field("m2m"),
id="fields.E304",
),
],
)
def test_m2m_to_m2m_with_inheritance(self):
"""Ref #22047."""
class Target(models.Model):
pass
class Model(models.Model):
children = models.ManyToManyField(
"Child", related_name="m2m_clash", related_query_name="no_clash"
)
class Parent(models.Model):
m2m_clash = models.ManyToManyField("Target")
class Child(Parent):
pass
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Child.m2m_clash' for "
"'invalid_models_tests.Model.children' clashes with field "
"name 'invalid_models_tests.Child.m2m_clash'.",
hint=(
"Rename field 'invalid_models_tests.Child.m2m_clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.children'."
),
obj=Model._meta.get_field("children"),
id="fields.E302",
)
],
)
def test_no_clash_for_hidden_related_name(self):
class Stub(models.Model):
pass
class ManyToManyRel(models.Model):
thing1 = models.ManyToManyField(Stub, related_name="+")
thing2 = models.ManyToManyField(Stub, related_name="+")
class FKRel(models.Model):
thing1 = models.ForeignKey(Stub, models.CASCADE, related_name="+")
thing2 = models.ForeignKey(Stub, models.CASCADE, related_name="+")
self.assertEqual(ManyToManyRel.check(), [])
self.assertEqual(FKRel.check(), [])
@isolate_apps("invalid_models_tests")
class ReverseQueryNameClashTests(SimpleTestCase):
def test_fk_to_integer(self):
self._test_reverse_query_name_clash(
target=models.IntegerField(),
relative=models.ForeignKey("Target", models.CASCADE),
)
def test_fk_to_fk(self):
self._test_reverse_query_name_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ForeignKey("Target", models.CASCADE),
)
def test_fk_to_m2m(self):
self._test_reverse_query_name_clash(
target=models.ManyToManyField("Another"),
relative=models.ForeignKey("Target", models.CASCADE),
)
def test_m2m_to_integer(self):
self._test_reverse_query_name_clash(
target=models.IntegerField(), relative=models.ManyToManyField("Target")
)
def test_m2m_to_fk(self):
self._test_reverse_query_name_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ManyToManyField("Target"),
)
def test_m2m_to_m2m(self):
self._test_reverse_query_name_clash(
target=models.ManyToManyField("Another"),
relative=models.ManyToManyField("Target"),
)
def _test_reverse_query_name_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
model = target
class Model(models.Model):
rel = relative
self.assertEqual(
Model.check(),
[
Error(
"Reverse query name for 'invalid_models_tests.Model.rel' "
"clashes with field name 'invalid_models_tests.Target.model'.",
hint=(
"Rename field 'invalid_models_tests.Target.model', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.rel'."
),
obj=Model._meta.get_field("rel"),
id="fields.E303",
),
],
)
@modify_settings(INSTALLED_APPS={"append": "basic"})
@isolate_apps("basic", "invalid_models_tests")
def test_no_clash_across_apps_without_accessor(self):
class Target(models.Model):
class Meta:
app_label = "invalid_models_tests"
class Model(models.Model):
m2m = models.ManyToManyField(Target, related_name="+")
class Meta:
app_label = "basic"
def _test():
# Define model with the same name.
class Model(models.Model):
m2m = models.ManyToManyField(Target, related_name="+")
class Meta:
app_label = "invalid_models_tests"
self.assertEqual(Model.check(), [])
_test()
self.assertEqual(Model.check(), [])
@isolate_apps("invalid_models_tests")
class ExplicitRelatedNameClashTests(SimpleTestCase):
def test_fk_to_integer(self):
self._test_explicit_related_name_clash(
target=models.IntegerField(),
relative=models.ForeignKey("Target", models.CASCADE, related_name="clash"),
)
def test_fk_to_fk(self):
self._test_explicit_related_name_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ForeignKey("Target", models.CASCADE, related_name="clash"),
)
def test_fk_to_m2m(self):
self._test_explicit_related_name_clash(
target=models.ManyToManyField("Another"),
relative=models.ForeignKey("Target", models.CASCADE, related_name="clash"),
)
def test_m2m_to_integer(self):
self._test_explicit_related_name_clash(
target=models.IntegerField(),
relative=models.ManyToManyField("Target", related_name="clash"),
)
def test_m2m_to_fk(self):
self._test_explicit_related_name_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ManyToManyField("Target", related_name="clash"),
)
def test_m2m_to_m2m(self):
self._test_explicit_related_name_clash(
target=models.ManyToManyField("Another"),
relative=models.ManyToManyField("Target", related_name="clash"),
)
def _test_explicit_related_name_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
clash = target
class Model(models.Model):
rel = relative
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Target.clash' for "
"'invalid_models_tests.Model.rel' clashes with field name "
"'invalid_models_tests.Target.clash'.",
hint=(
"Rename field 'invalid_models_tests.Target.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.rel'."
),
obj=Model._meta.get_field("rel"),
id="fields.E302",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.rel' "
"clashes with field name 'invalid_models_tests.Target.clash'.",
hint=(
"Rename field 'invalid_models_tests.Target.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.rel'."
),
obj=Model._meta.get_field("rel"),
id="fields.E303",
),
],
)
@isolate_apps("invalid_models_tests")
class ExplicitRelatedQueryNameClashTests(SimpleTestCase):
def test_fk_to_integer(self, related_name=None):
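        # The related_name parameter exists so the test_hidden_* variants
        # below can rerun these tests with related_name="+".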
self._test_explicit_related_query_name_clash(
target=models.IntegerField(),
relative=models.ForeignKey(
"Target",
models.CASCADE,
related_name=related_name,
related_query_name="clash",
),
)
def test_hidden_fk_to_integer(self, related_name=None):
self.test_fk_to_integer(related_name="+")
def test_fk_to_fk(self, related_name=None):
self._test_explicit_related_query_name_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ForeignKey(
"Target",
models.CASCADE,
related_name=related_name,
related_query_name="clash",
),
)
def test_hidden_fk_to_fk(self):
self.test_fk_to_fk(related_name="+")
def test_fk_to_m2m(self, related_name=None):
self._test_explicit_related_query_name_clash(
target=models.ManyToManyField("Another"),
relative=models.ForeignKey(
"Target",
models.CASCADE,
related_name=related_name,
related_query_name="clash",
),
)
def test_hidden_fk_to_m2m(self):
self.test_fk_to_m2m(related_name="+")
def test_m2m_to_integer(self, related_name=None):
self._test_explicit_related_query_name_clash(
target=models.IntegerField(),
relative=models.ManyToManyField(
"Target", related_name=related_name, related_query_name="clash"
),
)
def test_hidden_m2m_to_integer(self):
self.test_m2m_to_integer(related_name="+")
def test_m2m_to_fk(self, related_name=None):
self._test_explicit_related_query_name_clash(
target=models.ForeignKey("Another", models.CASCADE),
relative=models.ManyToManyField(
"Target", related_name=related_name, related_query_name="clash"
),
)
def test_hidden_m2m_to_fk(self):
self.test_m2m_to_fk(related_name="+")
def test_m2m_to_m2m(self, related_name=None):
self._test_explicit_related_query_name_clash(
target=models.ManyToManyField("Another"),
relative=models.ManyToManyField(
"Target",
related_name=related_name,
related_query_name="clash",
),
)
def test_hidden_m2m_to_m2m(self):
self.test_m2m_to_m2m(related_name="+")
def _test_explicit_related_query_name_clash(self, target, relative):
class Another(models.Model):
pass
class Target(models.Model):
clash = target
class Model(models.Model):
rel = relative
self.assertEqual(
Model.check(),
[
Error(
"Reverse query name for 'invalid_models_tests.Model.rel' "
"clashes with field name 'invalid_models_tests.Target.clash'.",
hint=(
"Rename field 'invalid_models_tests.Target.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.rel'."
),
obj=Model._meta.get_field("rel"),
id="fields.E303",
),
],
)
@isolate_apps("invalid_models_tests")
class SelfReferentialM2MClashTests(SimpleTestCase):
def test_clash_between_accessors(self):
class Model(models.Model):
first_m2m = models.ManyToManyField("self", symmetrical=False)
second_m2m = models.ManyToManyField("self", symmetrical=False)
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Model.model_set' for "
"'invalid_models_tests.Model.first_m2m' clashes with reverse "
"accessor for 'invalid_models_tests.Model.second_m2m'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.first_m2m' or "
"'invalid_models_tests.Model.second_m2m'."
),
obj=Model._meta.get_field("first_m2m"),
id="fields.E304",
),
Error(
"Reverse accessor 'Model.model_set' for "
"'invalid_models_tests.Model.second_m2m' clashes with reverse "
"accessor for 'invalid_models_tests.Model.first_m2m'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.second_m2m' or "
"'invalid_models_tests.Model.first_m2m'."
),
obj=Model._meta.get_field("second_m2m"),
id="fields.E304",
),
],
)
def test_accessor_clash(self):
class Model(models.Model):
model_set = models.ManyToManyField("self", symmetrical=False)
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Model.model_set' for "
"'invalid_models_tests.Model.model_set' clashes with field "
"name 'invalid_models_tests.Model.model_set'.",
hint=(
"Rename field 'invalid_models_tests.Model.model_set', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.model_set'."
),
obj=Model._meta.get_field("model_set"),
id="fields.E302",
),
],
)
def test_reverse_query_name_clash(self):
class Model(models.Model):
model = models.ManyToManyField("self", symmetrical=False)
self.assertEqual(
Model.check(),
[
Error(
"Reverse query name for 'invalid_models_tests.Model.model' "
"clashes with field name 'invalid_models_tests.Model.model'.",
hint=(
"Rename field 'invalid_models_tests.Model.model', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.model'."
),
obj=Model._meta.get_field("model"),
id="fields.E303",
),
],
)
def test_clash_under_explicit_related_name(self):
class Model(models.Model):
clash = models.IntegerField()
m2m = models.ManyToManyField(
"self", symmetrical=False, related_name="clash"
)
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Model.clash' for "
"'invalid_models_tests.Model.m2m' clashes with field name "
"'invalid_models_tests.Model.clash'.",
hint=(
"Rename field 'invalid_models_tests.Model.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.m2m'."
),
obj=Model._meta.get_field("m2m"),
id="fields.E302",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.m2m' "
"clashes with field name 'invalid_models_tests.Model.clash'.",
hint=(
"Rename field 'invalid_models_tests.Model.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.m2m'."
),
obj=Model._meta.get_field("m2m"),
id="fields.E303",
),
],
)
def test_valid_model(self):
class Model(models.Model):
first = models.ManyToManyField(
"self", symmetrical=False, related_name="first_accessor"
)
second = models.ManyToManyField(
"self", symmetrical=False, related_name="second_accessor"
)
self.assertEqual(Model.check(), [])
@isolate_apps("invalid_models_tests")
class SelfReferentialFKClashTests(SimpleTestCase):
def test_accessor_clash(self):
class Model(models.Model):
model_set = models.ForeignKey("Model", models.CASCADE)
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Model.model_set' for "
"'invalid_models_tests.Model.model_set' clashes with field "
"name 'invalid_models_tests.Model.model_set'.",
hint=(
"Rename field 'invalid_models_tests.Model.model_set', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.model_set'."
),
obj=Model._meta.get_field("model_set"),
id="fields.E302",
),
],
)
def test_reverse_query_name_clash(self):
class Model(models.Model):
model = models.ForeignKey("Model", models.CASCADE)
self.assertEqual(
Model.check(),
[
Error(
"Reverse query name for 'invalid_models_tests.Model.model' "
"clashes with field name 'invalid_models_tests.Model.model'.",
hint=(
"Rename field 'invalid_models_tests.Model.model', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.model'."
),
obj=Model._meta.get_field("model"),
id="fields.E303",
),
],
)
def test_clash_under_explicit_related_name(self):
class Model(models.Model):
clash = models.CharField(max_length=10)
foreign = models.ForeignKey("Model", models.CASCADE, related_name="clash")
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Model.clash' for "
"'invalid_models_tests.Model.foreign' clashes with field name "
"'invalid_models_tests.Model.clash'.",
hint=(
"Rename field 'invalid_models_tests.Model.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.foreign'."
),
obj=Model._meta.get_field("foreign"),
id="fields.E302",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.foreign' "
"clashes with field name 'invalid_models_tests.Model.clash'.",
hint=(
"Rename field 'invalid_models_tests.Model.clash', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.foreign'."
),
obj=Model._meta.get_field("foreign"),
id="fields.E303",
),
],
)
@isolate_apps("invalid_models_tests")
class ComplexClashTests(SimpleTestCase):
# New tests should not be included here, because this is a single,
# self-contained sanity check, not a test of everything.
def test_complex_clash(self):
class Target(models.Model):
tgt_safe = models.CharField(max_length=10)
clash = models.CharField(max_length=10)
model = models.CharField(max_length=10)
clash1_set = models.CharField(max_length=10)
class Model(models.Model):
src_safe = models.CharField(max_length=10)
foreign_1 = models.ForeignKey(Target, models.CASCADE, related_name="id")
foreign_2 = models.ForeignKey(
Target, models.CASCADE, related_name="src_safe"
)
m2m_1 = models.ManyToManyField(Target, related_name="id")
m2m_2 = models.ManyToManyField(Target, related_name="src_safe")
self.assertEqual(
Model.check(),
[
Error(
"Reverse accessor 'Target.id' for "
"'invalid_models_tests.Model.foreign_1' clashes with field "
"name 'invalid_models_tests.Target.id'.",
hint=(
"Rename field 'invalid_models_tests.Target.id', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.foreign_1'."
),
obj=Model._meta.get_field("foreign_1"),
id="fields.E302",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.foreign_1' "
"clashes with field name 'invalid_models_tests.Target.id'.",
hint=(
"Rename field 'invalid_models_tests.Target.id', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.foreign_1'."
),
obj=Model._meta.get_field("foreign_1"),
id="fields.E303",
),
Error(
"Reverse accessor 'Target.id' for "
"'invalid_models_tests.Model.foreign_1' clashes with reverse "
"accessor for 'invalid_models_tests.Model.m2m_1'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.foreign_1' or "
"'invalid_models_tests.Model.m2m_1'."
),
obj=Model._meta.get_field("foreign_1"),
id="fields.E304",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.foreign_1' "
"clashes with reverse query name for "
"'invalid_models_tests.Model.m2m_1'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.foreign_1' or "
"'invalid_models_tests.Model.m2m_1'."
),
obj=Model._meta.get_field("foreign_1"),
id="fields.E305",
),
Error(
"Reverse accessor 'Target.src_safe' for "
"'invalid_models_tests.Model.foreign_2' clashes with reverse "
"accessor for 'invalid_models_tests.Model.m2m_2'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.foreign_2' or "
"'invalid_models_tests.Model.m2m_2'."
),
obj=Model._meta.get_field("foreign_2"),
id="fields.E304",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.foreign_2' "
"clashes with reverse query name for "
"'invalid_models_tests.Model.m2m_2'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.foreign_2' or "
"'invalid_models_tests.Model.m2m_2'."
),
obj=Model._meta.get_field("foreign_2"),
id="fields.E305",
),
Error(
"Reverse accessor 'Target.id' for "
"'invalid_models_tests.Model.m2m_1' clashes with field name "
"'invalid_models_tests.Target.id'.",
hint=(
"Rename field 'invalid_models_tests.Target.id', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.m2m_1'."
),
obj=Model._meta.get_field("m2m_1"),
id="fields.E302",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.m2m_1' "
"clashes with field name 'invalid_models_tests.Target.id'.",
hint=(
"Rename field 'invalid_models_tests.Target.id', or "
"add/change a related_name argument to the definition for "
"field 'invalid_models_tests.Model.m2m_1'."
),
obj=Model._meta.get_field("m2m_1"),
id="fields.E303",
),
Error(
"Reverse accessor 'Target.id' for "
"'invalid_models_tests.Model.m2m_1' clashes with reverse "
"accessor for 'invalid_models_tests.Model.foreign_1'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.m2m_1' or "
"'invalid_models_tests.Model.foreign_1'."
),
obj=Model._meta.get_field("m2m_1"),
id="fields.E304",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.m2m_1' "
"clashes with reverse query name for "
"'invalid_models_tests.Model.foreign_1'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.m2m_1' or "
"'invalid_models_tests.Model.foreign_1'."
),
obj=Model._meta.get_field("m2m_1"),
id="fields.E305",
),
Error(
"Reverse accessor 'Target.src_safe' for "
"'invalid_models_tests.Model.m2m_2' clashes with reverse "
"accessor for 'invalid_models_tests.Model.foreign_2'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.m2m_2' or "
"'invalid_models_tests.Model.foreign_2'."
),
obj=Model._meta.get_field("m2m_2"),
id="fields.E304",
),
Error(
"Reverse query name for 'invalid_models_tests.Model.m2m_2' "
"clashes with reverse query name for "
"'invalid_models_tests.Model.foreign_2'.",
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Model.m2m_2' or "
"'invalid_models_tests.Model.foreign_2'."
),
obj=Model._meta.get_field("m2m_2"),
id="fields.E305",
),
],
)
def test_clash_parent_link(self):
class Parent(models.Model):
pass
class Child(Parent):
other_parent = models.OneToOneField(Parent, models.CASCADE)
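        # Each tuple below is (error_id, attr, rel_name, field_name,
        # clash_name), expanded into the expected Error instances.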
errors = [
(
"fields.E304",
"accessor",
" 'Parent.child'",
"parent_ptr",
"other_parent",
),
("fields.E305", "query name", "", "parent_ptr", "other_parent"),
(
"fields.E304",
"accessor",
" 'Parent.child'",
"other_parent",
"parent_ptr",
),
("fields.E305", "query name", "", "other_parent", "parent_ptr"),
]
self.assertEqual(
Child.check(),
[
Error(
"Reverse %s%s for 'invalid_models_tests.Child.%s' clashes with "
"reverse %s for 'invalid_models_tests.Child.%s'."
% (attr, rel_name, field_name, attr, clash_name),
hint=(
"Add or change a related_name argument to the definition "
"for 'invalid_models_tests.Child.%s' or "
"'invalid_models_tests.Child.%s'." % (field_name, clash_name)
),
obj=Child._meta.get_field(field_name),
id=error_id,
)
for error_id, attr, rel_name, field_name, clash_name in errors
],
)
@isolate_apps("invalid_models_tests")
class M2mThroughFieldsTests(SimpleTestCase):
def test_m2m_field_argument_validation(self):
"""
ManyToManyField accepts the ``through_fields`` kwarg
only if an intermediary table is specified.
"""
class Fan(models.Model):
pass
with self.assertRaisesMessage(
ValueError, "Cannot specify through_fields without a through model"
):
models.ManyToManyField(Fan, through_fields=("f1", "f2"))
def test_invalid_order(self):
"""
Mixing up the order of link fields to ManyToManyField.through_fields
triggers validation errors.
"""
class Fan(models.Model):
pass
class Event(models.Model):
invitees = models.ManyToManyField(
Fan, through="Invitation", through_fields=("invitee", "event")
)
class Invitation(models.Model):
event = models.ForeignKey(Event, models.CASCADE)
invitee = models.ForeignKey(Fan, models.CASCADE)
inviter = models.ForeignKey(Fan, models.CASCADE, related_name="+")
field = Event._meta.get_field("invitees")
self.assertEqual(
field.check(from_model=Event),
[
Error(
"'Invitation.invitee' is not a foreign key to 'Event'.",
hint=(
"Did you mean one of the following foreign keys to 'Event': "
"event?"
),
obj=field,
id="fields.E339",
),
Error(
"'Invitation.event' is not a foreign key to 'Fan'.",
hint=(
"Did you mean one of the following foreign keys to 'Fan': "
"invitee, inviter?"
),
obj=field,
id="fields.E339",
),
],
)
def test_invalid_field(self):
"""
Providing invalid field names to ManyToManyField.through_fields
triggers validation errors.
"""
class Fan(models.Model):
pass
class Event(models.Model):
invitees = models.ManyToManyField(
Fan,
through="Invitation",
through_fields=("invalid_field_1", "invalid_field_2"),
)
class Invitation(models.Model):
event = models.ForeignKey(Event, models.CASCADE)
invitee = models.ForeignKey(Fan, models.CASCADE)
inviter = models.ForeignKey(Fan, models.CASCADE, related_name="+")
field = Event._meta.get_field("invitees")
self.assertEqual(
field.check(from_model=Event),
[
Error(
"The intermediary model 'invalid_models_tests.Invitation' has no "
"field 'invalid_field_1'.",
hint=(
"Did you mean one of the following foreign keys to 'Event': "
"event?"
),
obj=field,
id="fields.E338",
),
Error(
"The intermediary model 'invalid_models_tests.Invitation' has no "
"field 'invalid_field_2'.",
hint=(
"Did you mean one of the following foreign keys to 'Fan': "
"invitee, inviter?"
),
obj=field,
id="fields.E338",
),
],
)
def test_explicit_field_names(self):
"""
If ``through_fields`` kwarg is given, it must specify both
link fields of the intermediary table.
"""
class Fan(models.Model):
pass
class Event(models.Model):
invitees = models.ManyToManyField(
Fan, through="Invitation", through_fields=(None, "invitee")
)
class Invitation(models.Model):
event = models.ForeignKey(Event, models.CASCADE)
invitee = models.ForeignKey(Fan, models.CASCADE)
inviter = models.ForeignKey(Fan, models.CASCADE, related_name="+")
field = Event._meta.get_field("invitees")
self.assertEqual(
field.check(from_model=Event),
[
Error(
"Field specifies 'through_fields' but does not provide the names "
"of the two link fields that should be used for the relation "
"through model 'invalid_models_tests.Invitation'.",
hint=(
"Make sure you specify 'through_fields' as "
"through_fields=('field1', 'field2')"
),
obj=field,
id="fields.E337",
),
],
)
def test_superset_foreign_object(self):
class Parent(models.Model):
a = models.PositiveIntegerField()
b = models.PositiveIntegerField()
c = models.PositiveIntegerField()
class Meta:
unique_together = (("a", "b", "c"),)
class Child(models.Model):
a = models.PositiveIntegerField()
b = models.PositiveIntegerField()
value = models.CharField(max_length=255)
parent = models.ForeignObject(
Parent,
on_delete=models.SET_NULL,
from_fields=("a", "b"),
to_fields=("a", "b"),
related_name="children",
)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(from_model=Child),
[
Error(
"No subset of the fields 'a', 'b' on model 'Parent' is unique.",
hint=(
"Mark a single field as unique=True or add a set of "
"fields to a unique constraint (via unique_together or a "
"UniqueConstraint (without condition) in the model "
"Meta.constraints)."
),
obj=field,
id="fields.E310",
),
],
)
def test_intersection_foreign_object(self):
class Parent(models.Model):
a = models.PositiveIntegerField()
b = models.PositiveIntegerField()
c = models.PositiveIntegerField()
d = models.PositiveIntegerField()
class Meta:
unique_together = (("a", "b", "c"),)
class Child(models.Model):
a = models.PositiveIntegerField()
b = models.PositiveIntegerField()
d = models.PositiveIntegerField()
value = models.CharField(max_length=255)
parent = models.ForeignObject(
Parent,
on_delete=models.SET_NULL,
from_fields=("a", "b", "d"),
to_fields=("a", "b", "d"),
related_name="children",
)
field = Child._meta.get_field("parent")
self.assertEqual(
field.check(from_model=Child),
[
Error(
"No subset of the fields 'a', 'b', 'd' on model 'Parent' is "
"unique.",
hint=(
"Mark a single field as unique=True or add a set of "
"fields to a unique constraint (via unique_together or a "
"UniqueConstraint (without condition) in the model "
"Meta.constraints)."
),
obj=field,
id="fields.E310",
),
],
)
import unittest
from django.core.checks import Error, Warning
from django.core.checks.model_checks import _check_lazy_references
from django.db import connection, connections, models
from django.db.models.functions import Abs, Lower, Round
from django.db.models.signals import post_init
from django.test import SimpleTestCase, TestCase, ignore_warnings, skipUnlessDBFeature
from django.test.utils import isolate_apps, override_settings, register_lookup
from django.utils.deprecation import RemovedInDjango51Warning
class EmptyRouter:
pass
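# EmptyRouter stands in for settings.DATABASE_ROUTERS in the tests below:
# having any router installed is enough to downgrade the table-name clash
# errors (fields.E340) to warnings (fields.W344).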
def get_max_column_name_length():
allowed_len = None
db_alias = None
for db in ("default", "other"):
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is not None and not connection.features.truncates_names:
if allowed_len is None or max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
return (allowed_len, db_alias)
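# On databases with no column name limit, or ones that silently truncate
# long names, this helper returns (None, None); the skipIf decorators in
# FieldNamesTests below use that to skip the long-name tests entirely.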
@isolate_apps("invalid_models_tests")
@ignore_warnings(category=RemovedInDjango51Warning)
class IndexTogetherTests(SimpleTestCase):
def test_non_iterable(self):
class Model(models.Model):
class Meta:
index_together = 42
self.assertEqual(
Model.check(),
[
Error(
"'index_together' must be a list or tuple.",
obj=Model,
id="models.E008",
),
],
)
def test_non_list(self):
class Model(models.Model):
class Meta:
index_together = "not-a-list"
self.assertEqual(
Model.check(),
[
Error(
"'index_together' must be a list or tuple.",
obj=Model,
id="models.E008",
),
],
)
def test_list_containing_non_iterable(self):
class Model(models.Model):
class Meta:
index_together = [("a", "b"), 42]
self.assertEqual(
Model.check(),
[
Error(
"All 'index_together' elements must be lists or tuples.",
obj=Model,
id="models.E009",
),
],
)
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
index_together = [["missing_field"]]
self.assertEqual(
Model.check(),
[
Error(
"'index_together' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
def test_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.IntegerField()
class Bar(Foo):
field2 = models.IntegerField()
class Meta:
index_together = [["field2", "field1"]]
self.assertEqual(
Bar.check(),
[
Error(
"'index_together' refers to field 'field1' which is not "
"local to model 'Bar'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Bar,
id="models.E016",
),
],
)
def test_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
index_together = [["m2m"]]
self.assertEqual(
Model.check(),
[
Error(
"'index_together' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'index_together'.",
obj=Model,
id="models.E013",
),
],
)
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(
Foo, on_delete=models.CASCADE, related_name="bar_1"
)
foo_2 = models.ForeignKey(
Foo, on_delete=models.CASCADE, related_name="bar_2"
)
class Meta:
index_together = [["foo_1_id", "foo_2"]]
self.assertEqual(Bar.check(), [])
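        # Both the attname "foo_1_id" and the field name "foo_2" resolve
        # during checks, so no models.E012 error is raised.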
# unique_together tests are very similar to index_together tests.
@isolate_apps("invalid_models_tests")
class UniqueTogetherTests(SimpleTestCase):
def test_non_iterable(self):
class Model(models.Model):
class Meta:
unique_together = 42
self.assertEqual(
Model.check(),
[
Error(
"'unique_together' must be a list or tuple.",
obj=Model,
id="models.E010",
),
],
)
def test_list_containing_non_iterable(self):
class Model(models.Model):
one = models.IntegerField()
two = models.IntegerField()
class Meta:
unique_together = [("a", "b"), 42]
self.assertEqual(
Model.check(),
[
Error(
"All 'unique_together' elements must be lists or tuples.",
obj=Model,
id="models.E011",
),
],
)
def test_non_list(self):
class Model(models.Model):
class Meta:
unique_together = "not-a-list"
self.assertEqual(
Model.check(),
[
Error(
"'unique_together' must be a list or tuple.",
obj=Model,
id="models.E010",
),
],
)
def test_valid_model(self):
class Model(models.Model):
one = models.IntegerField()
two = models.IntegerField()
class Meta:
# unique_together can be a simple tuple
unique_together = ("one", "two")
self.assertEqual(Model.check(), [])
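        # Django normalizes a lone flat tuple into (("one", "two"),) when
        # the model class is created, so this is equivalent to the nested
        # form used elsewhere in these tests.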
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
unique_together = [["missing_field"]]
self.assertEqual(
Model.check(),
[
Error(
"'unique_together' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
def test_pointing_to_m2m(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
unique_together = [["m2m"]]
self.assertEqual(
Model.check(),
[
Error(
"'unique_together' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'unique_together'.",
obj=Model,
id="models.E013",
),
],
)
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(
Foo, on_delete=models.CASCADE, related_name="bar_1"
)
foo_2 = models.ForeignKey(
Foo, on_delete=models.CASCADE, related_name="bar_2"
)
class Meta:
unique_together = [["foo_1_id", "foo_2"]]
self.assertEqual(Bar.check(), [])
@isolate_apps("invalid_models_tests")
class IndexesTests(TestCase):
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [models.Index(fields=["missing_field"], name="name")]
self.assertEqual(
Model.check(),
[
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
def test_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
indexes = [models.Index(fields=["m2m"], name="name")]
self.assertEqual(
Model.check(),
[
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id="models.E013",
),
],
)
def test_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.IntegerField()
class Bar(Foo):
field2 = models.IntegerField()
class Meta:
indexes = [models.Index(fields=["field2", "field1"], name="name")]
self.assertEqual(
Bar.check(),
[
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Bar,
id="models.E016",
),
],
)
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(
Foo, on_delete=models.CASCADE, related_name="bar_1"
)
foo_2 = models.ForeignKey(
Foo, on_delete=models.CASCADE, related_name="bar_2"
)
class Meta:
indexes = [
models.Index(fields=["foo_1_id", "foo_2"], name="index_name")
]
self.assertEqual(Bar.check(), [])
def test_name_constraints(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(fields=["id"], name="_index_name"),
models.Index(fields=["id"], name="5index_name"),
]
self.assertEqual(
Model.check(),
[
Error(
"The index name '%sindex_name' cannot start with an "
"underscore or a number." % prefix,
obj=Model,
id="models.E033",
)
for prefix in ("_", "5")
],
)
def test_max_name_length(self):
index_name = "x" * 31
class Model(models.Model):
class Meta:
indexes = [models.Index(fields=["id"], name=index_name)]
self.assertEqual(
Model.check(),
[
Error(
"The index name '%s' cannot be longer than 30 characters."
% index_name,
obj=Model,
id="models.E034",
),
],
)
def test_index_with_condition(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
indexes = [
models.Index(
fields=["age"],
name="index_age_gte_10",
condition=models.Q(age__gte=10),
),
]
errors = Model.check(databases=self.databases)
expected = (
[]
if connection.features.supports_partial_indexes
else [
Warning(
"%s does not support indexes with conditions."
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning if you "
"don't care about it."
),
obj=Model,
id="models.W037",
)
]
)
self.assertEqual(errors, expected)
def test_index_with_condition_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {"supports_partial_indexes"}
indexes = [
models.Index(
fields=["age"],
name="index_age_gte_10",
condition=models.Q(age__gte=10),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_index_with_include(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
indexes = [
models.Index(
fields=["age"],
name="index_age_include_id",
include=["id"],
),
]
errors = Model.check(databases=self.databases)
expected = (
[]
if connection.features.supports_covering_indexes
else [
Warning(
"%s does not support indexes with non-key columns."
% connection.display_name,
hint=(
"Non-key columns will be ignored. Silence this warning if "
"you don't care about it."
),
obj=Model,
id="models.W040",
)
]
)
self.assertEqual(errors, expected)
def test_index_with_include_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {"supports_covering_indexes"}
indexes = [
models.Index(
fields=["age"],
name="index_age_include_id",
include=["id"],
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_covering_indexes")
def test_index_include_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(fields=["id"], include=["missing_field"], name="name"),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_index_include_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
indexes = [models.Index(fields=["id"], include=["m2m"], name="name")]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id="models.E013",
),
],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_index_include_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
field2 = models.IntegerField()
class Meta:
indexes = [
models.Index(fields=["field2"], include=["field1"], name="name"),
]
self.assertEqual(
Child.check(databases=self.databases),
[
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Child'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Child,
id="models.E016",
),
],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_index_include_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1")
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2")
class Meta:
                indexes = [
models.Index(
fields=["id"],
include=["fk_1_id", "fk_2"],
name="name",
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_func_index(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
indexes = [models.Index(Lower("name"), name="index_lower_name")]
warn = Warning(
"%s does not support indexes on expressions." % connection.display_name,
hint=(
"An index won't be created. Silence this warning if you don't "
"care about it."
),
obj=Model,
id="models.W043",
)
expected = [] if connection.features.supports_expression_indexes else [warn]
self.assertEqual(Model.check(databases=self.databases), expected)
def test_func_index_required_db_features(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
indexes = [models.Index(Lower("name"), name="index_lower_name")]
required_db_features = {"supports_expression_indexes"}
self.assertEqual(Model.check(databases=self.databases), [])
def test_func_index_complex_expression_custom_lookup(self):
class Model(models.Model):
height = models.IntegerField()
weight = models.IntegerField()
class Meta:
indexes = [
models.Index(
models.F("height")
/ (models.F("weight__abs") + models.Value(5)),
name="name",
),
]
with register_lookup(models.IntegerField, Abs):
self.assertEqual(Model.check(), [])
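        # register_lookup() is a context manager that registers Abs as a
        # transform on IntegerField only for the duration of the block, so
        # "weight__abs" resolves while the checks run.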
def test_func_index_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [models.Index(Lower("missing_field").desc(), name="name")]
self.assertEqual(
Model.check(),
[
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
def test_func_index_pointing_to_missing_field_nested(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(Abs(Round("missing_field")), name="name"),
]
self.assertEqual(
Model.check(),
[
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
def test_func_index_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
indexes = [models.Index(Lower("m2m"), name="name")]
self.assertEqual(
Model.check(),
[
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id="models.E013",
),
],
)
def test_func_index_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.CharField(max_length=15)
class Bar(Foo):
class Meta:
indexes = [models.Index(Lower("field1"), name="name")]
self.assertEqual(
Bar.check(),
[
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Bar,
id="models.E016",
),
],
)
def test_func_index_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_1")
foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_2")
class Meta:
indexes = [
models.Index(Lower("foo_1_id"), Lower("foo_2"), name="index_name"),
]
self.assertEqual(Bar.check(), [])
@isolate_apps("invalid_models_tests")
class FieldNamesTests(TestCase):
databases = {"default", "other"}
def test_ending_with_underscore(self):
class Model(models.Model):
field_ = models.CharField(max_length=10)
m2m_ = models.ManyToManyField("self")
self.assertEqual(
Model.check(),
[
Error(
"Field names must not end with an underscore.",
obj=Model._meta.get_field("field_"),
id="fields.E001",
),
Error(
"Field names must not end with an underscore.",
obj=Model._meta.get_field("m2m_"),
id="fields.E001",
),
],
)
max_column_name_length, column_limit_db_alias = get_max_column_name_length()
@unittest.skipIf(
max_column_name_length is None,
"The database doesn't have a column name length limit.",
)
def test_M2M_long_column_name(self):
"""
        #13711 -- Model check for long M2M column names when the database
        has column name length limits.
"""
        # A model with a very long name, used as the target of relations.
class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz(
models.Model
):
title = models.CharField(max_length=11)
# Main model for which checks will be performed.
class ModelWithLongField(models.Model):
m2m_field = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name="rn1",
)
m2m_field2 = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name="rn2",
through="m2msimple",
)
m2m_field3 = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name="rn3",
through="m2mcomplex",
)
fk = models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
related_name="rn4",
)
# Models used for setting `through` in M2M field.
class m2msimple(models.Model):
id2 = models.ForeignKey(ModelWithLongField, models.CASCADE)
class m2mcomplex(models.Model):
id2 = models.ForeignKey(ModelWithLongField, models.CASCADE)
long_field_name = "a" * (self.max_column_name_length + 1)
models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
).contribute_to_class(m2msimple, long_field_name)
models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
db_column=long_field_name,
).contribute_to_class(m2mcomplex, long_field_name)
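        # contribute_to_class() attaches a field to an already-created model
        # class, which lets the test build field names of exactly
        # max_column_name_length + 1 characters at runtime.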
errors = ModelWithLongField.check(databases=("default", "other"))
        # First expected error: the M2M field targeting the model with the
        # long name autogenerates an overly long column name.
m2m_long_name = (
"verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id"
)
if self.max_column_name_length > len(m2m_long_name):
# Some databases support names longer than the test name.
expected = []
else:
expected = [
Error(
'Autogenerated column name too long for M2M field "%s". '
'Maximum length is "%s" for database "%s".'
% (
m2m_long_name,
self.max_column_name_length,
self.column_limit_db_alias,
),
hint="Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'.",
obj=ModelWithLongField,
id="models.E019",
)
]
        # Second expected error: the FK specified in the `through` model
        # `m2msimple` has an autogenerated name longer than allowed. The
        # other M2M raises no check errors because its `through` model sets
        # db_column for the FK, even though the actual name exceeds the
        # database limit.
expected.append(
Error(
'Autogenerated column name too long for M2M field "%s_id". '
'Maximum length is "%s" for database "%s".'
% (
long_field_name,
self.max_column_name_length,
self.column_limit_db_alias,
),
hint="Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'.",
obj=ModelWithLongField,
id="models.E019",
)
)
self.assertEqual(errors, expected)
        # The long-column-name check runs only for the specified database
        # aliases.
self.assertEqual(ModelWithLongField.check(databases=None), [])
@unittest.skipIf(
max_column_name_length is None,
"The database doesn't have a column name length limit.",
)
def test_local_field_long_column_name(self):
"""
        #13711 -- Model check for long column names when the database does
        not support long names.
"""
class ModelWithLongField(models.Model):
title = models.CharField(max_length=11)
long_field_name = "a" * (self.max_column_name_length + 1)
long_field_name2 = "b" * (self.max_column_name_length + 1)
models.CharField(max_length=11).contribute_to_class(
ModelWithLongField, long_field_name
)
models.CharField(max_length=11, db_column="vlmn").contribute_to_class(
ModelWithLongField, long_field_name2
)
self.assertEqual(
ModelWithLongField.check(databases=("default", "other")),
[
Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (
long_field_name,
self.max_column_name_length,
self.column_limit_db_alias,
),
hint="Set the column name manually using 'db_column'.",
obj=ModelWithLongField,
id="models.E018",
)
],
)
        # The long-column-name check runs only for the specified database
        # aliases.
self.assertEqual(ModelWithLongField.check(databases=None), [])
def test_including_separator(self):
class Model(models.Model):
some__field = models.IntegerField()
self.assertEqual(
Model.check(),
[
Error(
'Field names must not contain "__".',
obj=Model._meta.get_field("some__field"),
id="fields.E002",
)
],
)
def test_pk(self):
class Model(models.Model):
pk = models.IntegerField()
self.assertEqual(
Model.check(),
[
Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=Model._meta.get_field("pk"),
id="fields.E003",
)
],
)
def test_db_column_clash(self):
class Model(models.Model):
foo = models.IntegerField()
bar = models.IntegerField(db_column="foo")
self.assertEqual(
Model.check(),
[
Error(
"Field 'bar' has column name 'foo' that is used by "
"another field.",
hint="Specify a 'db_column' for the field.",
obj=Model,
id="models.E007",
)
],
)
@isolate_apps("invalid_models_tests")
class ShadowingFieldsTests(SimpleTestCase):
def test_field_name_clash_with_child_accessor(self):
class Parent(models.Model):
pass
class Child(Parent):
child = models.CharField(max_length=100)
self.assertEqual(
Child.check(),
[
Error(
"The field 'child' clashes with the field "
"'child' from model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field("child"),
id="models.E006",
)
],
)
def test_field_name_clash_with_m2m_through(self):
class Parent(models.Model):
clash_id = models.IntegerField()
class Child(Parent):
clash = models.ForeignKey("Child", models.CASCADE)
class Model(models.Model):
parents = models.ManyToManyField(
to=Parent,
through="Through",
through_fields=["parent", "model"],
)
class Through(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
model = models.ForeignKey(Model, models.CASCADE)
self.assertEqual(
Child.check(),
[
Error(
"The field 'clash' clashes with the field 'clash_id' from "
"model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field("clash"),
id="models.E006",
)
],
)
def test_multiinheritance_clash(self):
class Mother(models.Model):
clash = models.IntegerField()
class Father(models.Model):
clash = models.IntegerField()
class Child(Mother, Father):
            # Here we have two clashes: 'id' (the automatic primary key) and
            # 'clash', because both parents define these fields.
pass
self.assertEqual(
Child.check(),
[
Error(
"The field 'id' from parent model "
"'invalid_models_tests.mother' clashes with the field 'id' "
"from parent model 'invalid_models_tests.father'.",
obj=Child,
id="models.E005",
),
Error(
"The field 'clash' from parent model "
"'invalid_models_tests.mother' clashes with the field 'clash' "
"from parent model 'invalid_models_tests.father'.",
obj=Child,
id="models.E005",
),
],
)
def test_inheritance_clash(self):
class Parent(models.Model):
f_id = models.IntegerField()
class Target(models.Model):
# This field doesn't result in a clash.
f_id = models.IntegerField()
class Child(Parent):
            # This field clashes with the parent's "f_id" field.
f = models.ForeignKey(Target, models.CASCADE)
self.assertEqual(
Child.check(),
[
Error(
"The field 'f' clashes with the field 'f_id' "
"from model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field("f"),
id="models.E006",
)
],
)
def test_multigeneration_inheritance(self):
class GrandParent(models.Model):
clash = models.IntegerField()
class Parent(GrandParent):
pass
class Child(Parent):
pass
class GrandChild(Child):
clash = models.IntegerField()
self.assertEqual(
GrandChild.check(),
[
Error(
"The field 'clash' clashes with the field 'clash' "
"from model 'invalid_models_tests.grandparent'.",
obj=GrandChild._meta.get_field("clash"),
id="models.E006",
)
],
)
def test_id_clash(self):
class Target(models.Model):
pass
class Model(models.Model):
fk = models.ForeignKey(Target, models.CASCADE)
fk_id = models.IntegerField()
self.assertEqual(
Model.check(),
[
Error(
"The field 'fk_id' clashes with the field 'fk' from model "
"'invalid_models_tests.model'.",
obj=Model._meta.get_field("fk_id"),
id="models.E006",
)
],
)
@isolate_apps("invalid_models_tests")
class OtherModelTests(SimpleTestCase):
def test_unique_primary_key(self):
invalid_id = models.IntegerField(primary_key=False)
class Model(models.Model):
id = invalid_id
self.assertEqual(
Model.check(),
[
Error(
"'id' can only be used as a field name if the field also sets "
"'primary_key=True'.",
obj=Model,
id="models.E004",
),
],
)
def test_ordering_non_iterable(self):
class Model(models.Model):
class Meta:
ordering = "missing_field"
self.assertEqual(
Model.check(),
[
Error(
"'ordering' must be a tuple or list "
"(even if you want to order by only one field).",
obj=Model,
id="models.E014",
),
],
)
def test_just_ordering_no_errors(self):
class Model(models.Model):
order = models.PositiveIntegerField()
class Meta:
ordering = ["order"]
self.assertEqual(Model.check(), [])
def test_just_order_with_respect_to_no_errors(self):
class Question(models.Model):
pass
class Answer(models.Model):
question = models.ForeignKey(Question, models.CASCADE)
class Meta:
order_with_respect_to = "question"
self.assertEqual(Answer.check(), [])
def test_ordering_with_order_with_respect_to(self):
class Question(models.Model):
pass
class Answer(models.Model):
question = models.ForeignKey(Question, models.CASCADE)
order = models.IntegerField()
class Meta:
order_with_respect_to = "question"
ordering = ["order"]
self.assertEqual(
Answer.check(),
[
Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=Answer,
id="models.E021",
),
],
)
def test_non_valid(self):
class RelationModel(models.Model):
pass
class Model(models.Model):
relation = models.ManyToManyField(RelationModel)
class Meta:
ordering = ["relation"]
self.assertEqual(
Model.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'relation'.",
obj=Model,
id="models.E015",
),
],
)
def test_ordering_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
ordering = ("missing_field",)
self.assertEqual(
Model.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_field'.",
obj=Model,
id="models.E015",
)
],
)
def test_ordering_pointing_to_missing_foreignkey_field(self):
class Model(models.Model):
missing_fk_field = models.IntegerField()
class Meta:
ordering = ("missing_fk_field_id",)
self.assertEqual(
Model.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_fk_field_id'.",
obj=Model,
id="models.E015",
)
],
)
def test_ordering_pointing_to_missing_related_field(self):
class Model(models.Model):
test = models.IntegerField()
class Meta:
ordering = ("missing_related__id",)
self.assertEqual(
Model.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_related__id'.",
obj=Model,
id="models.E015",
)
],
)
def test_ordering_pointing_to_missing_related_model_field(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ("parent__missing_field",)
self.assertEqual(
Child.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__missing_field'.",
obj=Child,
id="models.E015",
)
],
)
def test_ordering_pointing_to_non_related_field(self):
class Child(models.Model):
parent = models.IntegerField()
class Meta:
ordering = ("parent__missing_field",)
self.assertEqual(
Child.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__missing_field'.",
obj=Child,
id="models.E015",
)
],
)
def test_ordering_pointing_to_two_related_model_field(self):
class Parent2(models.Model):
pass
class Parent1(models.Model):
parent2 = models.ForeignKey(Parent2, models.CASCADE)
class Child(models.Model):
parent1 = models.ForeignKey(Parent1, models.CASCADE)
class Meta:
ordering = ("parent1__parent2__missing_field",)
self.assertEqual(
Child.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent1__parent2__missing_field'.",
obj=Child,
id="models.E015",
)
],
)
def test_ordering_pointing_multiple_times_to_model_fields(self):
class Parent(models.Model):
field1 = models.CharField(max_length=100)
field2 = models.CharField(max_length=100)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ("parent__field1__field2",)
self.assertEqual(
Child.check(),
[
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__field1__field2'.",
obj=Child,
id="models.E015",
)
],
)
def test_ordering_allows_registered_lookups(self):
class Model(models.Model):
test = models.CharField(max_length=100)
class Meta:
ordering = ("test__lower",)
with register_lookup(models.CharField, Lower):
self.assertEqual(Model.check(), [])
def test_ordering_pointing_to_lookup_not_transform(self):
class Model(models.Model):
test = models.CharField(max_length=100)
class Meta:
ordering = ("test__isnull",)
self.assertEqual(Model.check(), [])
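        # A trailing lookup such as __isnull is permitted in Meta.ordering,
        # so no models.E015 error is raised.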
def test_ordering_pointing_to_related_model_pk(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ("parent__pk",)
self.assertEqual(Child.check(), [])
def test_ordering_pointing_to_foreignkey_field(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ("parent_id",)
self.assertFalse(Child.check())
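        # Ordering by the attname "parent_id" is valid, so check() returns
        # an empty (falsy) list.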
def test_name_beginning_with_underscore(self):
class _Model(models.Model):
pass
self.assertEqual(
_Model.check(),
[
Error(
"The model name '_Model' cannot start or end with an underscore "
"as it collides with the query lookup syntax.",
obj=_Model,
id="models.E023",
)
],
)
def test_name_ending_with_underscore(self):
class Model_(models.Model):
pass
self.assertEqual(
Model_.check(),
[
Error(
"The model name 'Model_' cannot start or end with an underscore "
"as it collides with the query lookup syntax.",
obj=Model_,
id="models.E023",
)
],
)
def test_name_contains_double_underscores(self):
class Test__Model(models.Model):
pass
self.assertEqual(
Test__Model.check(),
[
Error(
"The model name 'Test__Model' cannot contain double underscores "
"as it collides with the query lookup syntax.",
obj=Test__Model,
id="models.E024",
)
],
)
def test_property_and_related_field_accessor_clash(self):
class Model(models.Model):
fk = models.ForeignKey("self", models.CASCADE)
# Override related field accessor.
Model.fk_id = property(lambda self: "ERROR")
self.assertEqual(
Model.check(),
[
Error(
"The property 'fk_id' clashes with a related field accessor.",
obj=Model,
id="models.E025",
)
],
)
def test_single_primary_key(self):
class Model(models.Model):
foo = models.IntegerField(primary_key=True)
bar = models.IntegerField(primary_key=True)
self.assertEqual(
Model.check(),
[
Error(
"The model cannot have more than one field with "
"'primary_key=True'.",
obj=Model,
id="models.E026",
)
],
)
@override_settings(TEST_SWAPPED_MODEL_BAD_VALUE="not-a-model")
def test_swappable_missing_app_name(self):
class Model(models.Model):
class Meta:
swappable = "TEST_SWAPPED_MODEL_BAD_VALUE"
self.assertEqual(
Model.check(),
[
Error(
"'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form "
"'app_label.app_name'.",
id="models.E001",
),
],
)
@override_settings(TEST_SWAPPED_MODEL_BAD_MODEL="not_an_app.Target")
def test_swappable_missing_app(self):
class Model(models.Model):
class Meta:
swappable = "TEST_SWAPPED_MODEL_BAD_MODEL"
self.assertEqual(
Model.check(),
[
Error(
"'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', "
"which has not been installed, or is abstract.",
id="models.E002",
),
],
)
def test_two_m2m_through_same_relationship(self):
class Person(models.Model):
pass
class Group(models.Model):
primary = models.ManyToManyField(
Person, through="Membership", related_name="primary"
)
secondary = models.ManyToManyField(
Person, through="Membership", related_name="secondary"
)
class Membership(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
group = models.ForeignKey(Group, models.CASCADE)
self.assertEqual(
Group.check(),
[
Error(
"The model has two identical many-to-many relations through "
"the intermediate model 'invalid_models_tests.Membership'.",
obj=Group,
id="models.E003",
)
],
)
def test_two_m2m_through_same_model_with_different_through_fields(self):
class Country(models.Model):
pass
class ShippingMethod(models.Model):
to_countries = models.ManyToManyField(
Country,
through="ShippingMethodPrice",
through_fields=("method", "to_country"),
)
from_countries = models.ManyToManyField(
Country,
through="ShippingMethodPrice",
through_fields=("method", "from_country"),
related_name="+",
)
class ShippingMethodPrice(models.Model):
method = models.ForeignKey(ShippingMethod, models.CASCADE)
to_country = models.ForeignKey(Country, models.CASCADE)
from_country = models.ForeignKey(Country, models.CASCADE)
self.assertEqual(ShippingMethod.check(), [])
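        # Distinct through_fields disambiguate the two relations through the
        # same intermediate model, so models.E003 is not raised here.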
def test_onetoone_with_parent_model(self):
class Place(models.Model):
pass
class ParkingLot(Place):
other_place = models.OneToOneField(
Place, models.CASCADE, related_name="other_parking"
)
self.assertEqual(ParkingLot.check(), [])
def test_onetoone_with_explicit_parent_link_parent_model(self):
class Place(models.Model):
pass
class ParkingLot(Place):
place = models.OneToOneField(
Place, models.CASCADE, parent_link=True, primary_key=True
)
other_place = models.OneToOneField(
Place, models.CASCADE, related_name="other_parking"
)
self.assertEqual(ParkingLot.check(), [])
def test_m2m_table_name_clash(self):
class Foo(models.Model):
bar = models.ManyToManyField("Bar", db_table="myapp_bar")
class Meta:
db_table = "myapp_foo"
class Bar(models.Model):
class Meta:
db_table = "myapp_bar"
self.assertEqual(
Foo.check(),
[
Error(
"The field's intermediary table 'myapp_bar' clashes with the "
"table name of 'invalid_models_tests.Bar'.",
obj=Foo._meta.get_field("bar"),
id="fields.E340",
)
],
)
@override_settings(
DATABASE_ROUTERS=["invalid_models_tests.test_models.EmptyRouter"]
)
def test_m2m_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
bar = models.ManyToManyField("Bar", db_table="myapp_bar")
class Meta:
db_table = "myapp_foo"
class Bar(models.Model):
class Meta:
db_table = "myapp_bar"
self.assertEqual(
Foo.check(),
[
Warning(
"The field's intermediary table 'myapp_bar' clashes with the "
"table name of 'invalid_models_tests.Bar'.",
obj=Foo._meta.get_field("bar"),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.Bar' is "
"correctly routed to a separate database."
),
id="fields.W344",
),
],
)
def test_m2m_field_table_name_clash(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foos = models.ManyToManyField(Foo, db_table="clash")
class Baz(models.Model):
foos = models.ManyToManyField(Foo, db_table="clash")
self.assertEqual(
Bar.check() + Baz.check(),
[
Error(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.Baz.foos'.",
obj=Bar._meta.get_field("foos"),
id="fields.E340",
),
Error(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.Bar.foos'.",
obj=Baz._meta.get_field("foos"),
id="fields.E340",
),
],
)
@override_settings(
DATABASE_ROUTERS=["invalid_models_tests.test_models.EmptyRouter"]
)
def test_m2m_field_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foos = models.ManyToManyField(Foo, db_table="clash")
class Baz(models.Model):
foos = models.ManyToManyField(Foo, db_table="clash")
self.assertEqual(
Bar.check() + Baz.check(),
[
Warning(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.%s.foos'." % clashing_model,
obj=model_cls._meta.get_field("foos"),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.%s.foos' is "
"correctly routed to a separate database." % clashing_model
),
id="fields.W344",
)
for model_cls, clashing_model in [(Bar, "Baz"), (Baz, "Bar")]
],
)
def test_m2m_autogenerated_table_name_clash(self):
class Foo(models.Model):
class Meta:
db_table = "bar_foos"
class Bar(models.Model):
# The autogenerated `db_table` will be bar_foos.
foos = models.ManyToManyField(Foo)
class Meta:
db_table = "bar"
self.assertEqual(
Bar.check(),
[
Error(
"The field's intermediary table 'bar_foos' clashes with the "
"table name of 'invalid_models_tests.Foo'.",
obj=Bar._meta.get_field("foos"),
id="fields.E340",
)
],
)
@override_settings(
DATABASE_ROUTERS=["invalid_models_tests.test_models.EmptyRouter"]
)
def test_m2m_autogenerated_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
class Meta:
db_table = "bar_foos"
class Bar(models.Model):
# The autogenerated db_table is bar_foos.
foos = models.ManyToManyField(Foo)
class Meta:
db_table = "bar"
self.assertEqual(
Bar.check(),
[
Warning(
"The field's intermediary table 'bar_foos' clashes with the "
"table name of 'invalid_models_tests.Foo'.",
obj=Bar._meta.get_field("foos"),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.Foo' is "
"correctly routed to a separate database."
),
id="fields.W344",
),
],
)
def test_m2m_unmanaged_shadow_models_not_checked(self):
class A1(models.Model):
pass
class C1(models.Model):
mm_a = models.ManyToManyField(A1, db_table="d1")
# Unmanaged models that shadow the above models. Reused table names
# shouldn't be flagged by any checks.
class A2(models.Model):
class Meta:
managed = False
class C2(models.Model):
mm_a = models.ManyToManyField(A2, through="Intermediate")
class Meta:
managed = False
class Intermediate(models.Model):
a2 = models.ForeignKey(A2, models.CASCADE, db_column="a1_id")
c2 = models.ForeignKey(C2, models.CASCADE, db_column="c1_id")
class Meta:
db_table = "d1"
managed = False
self.assertEqual(C1.check(), [])
self.assertEqual(C2.check(), [])
def test_m2m_to_concrete_and_proxy_allowed(self):
class A(models.Model):
pass
class Through(models.Model):
a = models.ForeignKey("A", models.CASCADE)
c = models.ForeignKey("C", models.CASCADE)
class ThroughProxy(Through):
class Meta:
proxy = True
class C(models.Model):
mm_a = models.ManyToManyField(A, through=Through)
mm_aproxy = models.ManyToManyField(
A, through=ThroughProxy, related_name="proxied_m2m"
)
self.assertEqual(C.check(), [])
@isolate_apps("django.contrib.auth", kwarg_name="apps")
def test_lazy_reference_checks(self, apps):
class DummyModel(models.Model):
author = models.ForeignKey("Author", models.CASCADE)
class Meta:
app_label = "invalid_models_tests"
class DummyClass:
def __call__(self, **kwargs):
pass
def dummy_method(self):
pass
def dummy_function(*args, **kwargs):
pass
apps.lazy_model_operation(dummy_function, ("auth", "imaginarymodel"))
apps.lazy_model_operation(dummy_function, ("fanciful_app", "imaginarymodel"))
post_init.connect(dummy_function, sender="missing-app.Model", apps=apps)
post_init.connect(DummyClass(), sender="missing-app.Model", apps=apps)
post_init.connect(
DummyClass().dummy_method, sender="missing-app.Model", apps=apps
)
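        # The registrations above all use string or tuple references to
        # models that are never installed, which _check_lazy_references()
        # reports below.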
self.assertEqual(
_check_lazy_references(apps),
[
Error(
"%r contains a lazy reference to auth.imaginarymodel, "
"but app 'auth' doesn't provide model 'imaginarymodel'."
% dummy_function,
obj=dummy_function,
id="models.E022",
),
Error(
"%r contains a lazy reference to fanciful_app.imaginarymodel, "
"but app 'fanciful_app' isn't installed." % dummy_function,
obj=dummy_function,
id="models.E022",
),
Error(
"An instance of class 'DummyClass' was connected to "
"the 'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj="invalid_models_tests.test_models",
id="signals.E001",
),
Error(
"Bound method 'DummyClass.dummy_method' was connected to the "
"'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj="invalid_models_tests.test_models",
id="signals.E001",
),
Error(
"The field invalid_models_tests.DummyModel.author was declared "
"with a lazy reference to 'invalid_models_tests.author', but app "
"'invalid_models_tests' isn't installed.",
hint=None,
obj=DummyModel.author.field,
id="fields.E307",
),
Error(
"The function 'dummy_function' was connected to the 'post_init' "
"signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj="invalid_models_tests.test_models",
id="signals.E001",
),
],
)
@isolate_apps("invalid_models_tests")
class DbTableCommentTests(TestCase):
def test_db_table_comment(self):
class Model(models.Model):
class Meta:
db_table_comment = "Table comment"
errors = Model.check(databases=self.databases)
expected = (
[]
if connection.features.supports_comments
else [
Warning(
f"{connection.display_name} does not support comments on tables "
f"(db_table_comment).",
obj=Model,
id="models.W046",
),
]
)
self.assertEqual(errors, expected)
def test_db_table_comment_required_db_features(self):
class Model(models.Model):
class Meta:
db_table_comment = "Table comment"
required_db_features = {"supports_comments"}
self.assertEqual(Model.check(databases=self.databases), [])
class MultipleAutoFieldsTests(TestCase):
def test_multiple_autofields(self):
msg = (
"Model invalid_models_tests.MultipleAutoFields can't have more "
"than one auto-generated field."
)
with self.assertRaisesMessage(ValueError, msg):
class MultipleAutoFields(models.Model):
auto1 = models.AutoField(primary_key=True)
auto2 = models.AutoField(primary_key=True)
@isolate_apps("invalid_models_tests")
class JSONFieldTests(TestCase):
@skipUnlessDBFeature("supports_json_field")
def test_ordering_pointing_to_json_field_value(self):
class Model(models.Model):
field = models.JSONField()
class Meta:
ordering = ["field__value"]
self.assertEqual(Model.check(databases=self.databases), [])
def test_check_jsonfield(self):
class Model(models.Model):
field = models.JSONField()
error = Error(
"%s does not support JSONFields." % connection.display_name,
obj=Model,
id="fields.E180",
)
expected = [] if connection.features.supports_json_field else [error]
self.assertEqual(Model.check(databases=self.databases), expected)
def test_check_jsonfield_required_db_features(self):
class Model(models.Model):
field = models.JSONField()
class Meta:
required_db_features = {"supports_json_field"}
self.assertEqual(Model.check(databases=self.databases), [])
@isolate_apps("invalid_models_tests")
class ConstraintsTests(TestCase):
def test_check_constraints(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.CheckConstraint(check=models.Q(age__gte=18), name="is_adult")
]
errors = Model.check(databases=self.databases)
warn = Warning(
"%s does not support check constraints." % connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=Model,
id="models.W027",
)
expected = (
[] if connection.features.supports_table_check_constraints else [warn]
)
self.assertCountEqual(errors, expected)
def test_check_constraints_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {"supports_table_check_constraints"}
constraints = [
models.CheckConstraint(check=models.Q(age__gte=18), name="is_adult")
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_check_constraint_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
required_db_features = {"supports_table_check_constraints"}
constraints = [
models.CheckConstraint(
name="name",
check=models.Q(missing_field=2),
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
]
if connection.features.supports_table_check_constraints
else [],
)
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_reverse_fk(self):
class Model(models.Model):
parent = models.ForeignKey("self", models.CASCADE, related_name="parents")
class Meta:
constraints = [
models.CheckConstraint(name="name", check=models.Q(parents=3)),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'parents'.",
obj=Model,
id="models.E012",
),
],
)
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_reverse_o2o(self):
class Model(models.Model):
parent = models.OneToOneField("self", models.CASCADE)
class Meta:
constraints = [
models.CheckConstraint(
name="name",
check=models.Q(model__isnull=True),
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'model'.",
obj=Model,
id="models.E012",
),
],
)
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
constraints = [
models.CheckConstraint(name="name", check=models.Q(m2m=2)),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id="models.E013",
),
],
)
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1")
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2")
class Meta:
constraints = [
models.CheckConstraint(
name="name",
check=models.Q(fk_1_id=2) | models.Q(fk_2=2),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_pk(self):
class Model(models.Model):
age = models.SmallIntegerField()
class Meta:
constraints = [
models.CheckConstraint(
name="name",
check=models.Q(pk__gt=5) & models.Q(age__gt=models.F("pk")),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
pass
class Meta:
constraints = [
models.CheckConstraint(name="name", check=models.Q(field1=1)),
]
self.assertEqual(
Child.check(databases=self.databases),
[
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Child'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Child,
id="models.E016",
),
],
)
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_joined_fields(self):
class Model(models.Model):
name = models.CharField(max_length=10)
field1 = models.PositiveSmallIntegerField()
field2 = models.PositiveSmallIntegerField()
field3 = models.PositiveSmallIntegerField()
parent = models.ForeignKey("self", models.CASCADE)
previous = models.OneToOneField("self", models.CASCADE, related_name="next")
class Meta:
constraints = [
models.CheckConstraint(
name="name1",
check=models.Q(
field1__lt=models.F("parent__field1")
+ models.F("parent__field2")
),
),
models.CheckConstraint(
name="name2", check=models.Q(name=Lower("parent__name"))
),
models.CheckConstraint(
name="name3", check=models.Q(parent__field3=models.F("field1"))
),
models.CheckConstraint(
name="name4",
check=models.Q(name=Lower("previous__name")),
),
]
joined_fields = [
"parent__field1",
"parent__field2",
"parent__field3",
"parent__name",
"previous__name",
]
errors = Model.check(databases=self.databases)
expected_errors = [
Error(
"'constraints' refers to the joined field '%s'." % field_name,
obj=Model,
id="models.E041",
)
for field_name in joined_fields
]
self.assertCountEqual(errors, expected_errors)
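        # assertCountEqual is used since one models.E041 error is emitted
        # per joined reference and their relative order is not guaranteed.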
@skipUnlessDBFeature("supports_table_check_constraints")
def test_check_constraint_pointing_to_joined_fields_complex_check(self):
class Model(models.Model):
name = models.PositiveSmallIntegerField()
field1 = models.PositiveSmallIntegerField()
field2 = models.PositiveSmallIntegerField()
parent = models.ForeignKey("self", models.CASCADE)
class Meta:
constraints = [
models.CheckConstraint(
name="name",
check=models.Q(
(
models.Q(name="test")
& models.Q(field1__lt=models.F("parent__field1"))
)
| (
models.Q(name__startswith=Lower("parent__name"))
& models.Q(
field1__gte=(
models.F("parent__field1")
+ models.F("parent__field2")
)
)
)
)
| (models.Q(name="test1")),
),
]
joined_fields = ["parent__field1", "parent__field2", "parent__name"]
errors = Model.check(databases=self.databases)
expected_errors = [
Error(
"'constraints' refers to the joined field '%s'." % field_name,
obj=Model,
id="models.E041",
)
for field_name in joined_fields
]
self.assertCountEqual(errors, expected_errors)
def test_check_constraint_raw_sql_check(self):
class Model(models.Model):
class Meta:
required_db_features = {"supports_table_check_constraints"}
constraints = [
models.CheckConstraint(check=models.Q(id__gt=0), name="q_check"),
models.CheckConstraint(
check=models.ExpressionWrapper(
models.Q(price__gt=20),
output_field=models.BooleanField(),
),
name="expression_wrapper_check",
),
models.CheckConstraint(
check=models.expressions.RawSQL(
"id = 0",
params=(),
output_field=models.BooleanField(),
),
name="raw_sql_check",
),
models.CheckConstraint(
check=models.Q(
models.ExpressionWrapper(
models.Q(
models.expressions.RawSQL(
"id = 0",
params=(),
output_field=models.BooleanField(),
)
),
output_field=models.BooleanField(),
)
),
name="nested_raw_sql_check",
),
]
expected_warnings = (
[
Warning(
"Check constraint 'raw_sql_check' contains RawSQL() expression and "
"won't be validated during the model full_clean().",
hint="Silence this warning if you don't care about it.",
obj=Model,
id="models.W045",
),
Warning(
"Check constraint 'nested_raw_sql_check' contains RawSQL() "
"expression and won't be validated during the model full_clean().",
hint="Silence this warning if you don't care about it.",
obj=Model,
id="models.W045",
),
]
if connection.features.supports_table_check_constraints
else []
)
self.assertEqual(Model.check(databases=self.databases), expected_warnings)
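        # Only the two constraints containing RawSQL() trigger models.W045;
        # the plain Q() and ExpressionWrapper() checks can still be
        # validated in Python during full_clean().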
def test_unique_constraint_with_condition(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["age"],
name="unique_age_gte_100",
condition=models.Q(age__gte=100),
),
]
errors = Model.check(databases=self.databases)
expected = (
[]
if connection.features.supports_partial_indexes
else [
Warning(
"%s does not support unique constraints with conditions."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id="models.W036",
),
]
)
self.assertEqual(errors, expected)
def test_unique_constraint_with_condition_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {"supports_partial_indexes"}
constraints = [
models.UniqueConstraint(
fields=["age"],
name="unique_age_gte_100",
condition=models.Q(age__gte=100),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_condition_pointing_to_missing_field(self):
class Model(models.Model):
age = models.SmallIntegerField()
class Meta:
required_db_features = {"supports_partial_indexes"}
constraints = [
models.UniqueConstraint(
name="name",
fields=["age"],
condition=models.Q(missing_field=2),
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
]
if connection.features.supports_partial_indexes
else [],
)
def test_unique_constraint_condition_pointing_to_joined_fields(self):
class Model(models.Model):
age = models.SmallIntegerField()
parent = models.ForeignKey("self", models.CASCADE)
class Meta:
required_db_features = {"supports_partial_indexes"}
constraints = [
models.UniqueConstraint(
name="name",
fields=["age"],
condition=models.Q(parent__age__lt=2),
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the joined field 'parent__age__lt'.",
obj=Model,
id="models.E041",
)
]
if connection.features.supports_partial_indexes
else [],
)
def test_unique_constraint_pointing_to_reverse_o2o(self):
class Model(models.Model):
parent = models.OneToOneField("self", models.CASCADE)
class Meta:
required_db_features = {"supports_partial_indexes"}
constraints = [
models.UniqueConstraint(
fields=["parent"],
name="name",
condition=models.Q(model__isnull=True),
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'model'.",
obj=Model,
id="models.E012",
),
]
if connection.features.supports_partial_indexes
else [],
)
def test_deferrable_unique_constraint(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["age"],
name="unique_age_deferrable",
deferrable=models.Deferrable.DEFERRED,
),
]
errors = Model.check(databases=self.databases)
expected = (
[]
if connection.features.supports_deferrable_unique_constraints
else [
Warning(
"%s does not support deferrable unique constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id="models.W038",
),
]
)
self.assertEqual(errors, expected)
def test_deferrable_unique_constraint_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {"supports_deferrable_unique_constraints"}
constraints = [
models.UniqueConstraint(
fields=["age"],
name="unique_age_deferrable",
deferrable=models.Deferrable.IMMEDIATE,
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(fields=["missing_field"], name="name")
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
def test_unique_constraint_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
constraints = [models.UniqueConstraint(fields=["m2m"], name="name")]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id="models.E013",
),
],
)
def test_unique_constraint_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
field2 = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(fields=["field2", "field1"], name="name"),
]
self.assertEqual(
Child.check(databases=self.databases),
[
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Child'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Child,
id="models.E016",
),
],
)
def test_unique_constraint_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1")
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2")
class Meta:
constraints = [
models.UniqueConstraint(fields=["fk_1_id", "fk_2"], name="name"),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_with_include(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["age"],
name="unique_age_include_id",
include=["id"],
),
]
errors = Model.check(databases=self.databases)
expected = (
[]
if connection.features.supports_covering_indexes
else [
Warning(
"%s does not support unique constraints with non-key columns."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id="models.W039",
),
]
)
self.assertEqual(errors, expected)
def test_unique_constraint_with_include_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {"supports_covering_indexes"}
constraints = [
models.UniqueConstraint(
fields=["age"],
name="unique_age_include_id",
include=["id"],
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_covering_indexes")
def test_unique_constraint_include_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(
fields=["id"],
include=["missing_field"],
name="name",
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_unique_constraint_include_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
constraints = [
models.UniqueConstraint(
fields=["id"],
include=["m2m"],
name="name",
),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id="models.E013",
),
],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_unique_constraint_include_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
field2 = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["field2"],
include=["field1"],
name="name",
),
]
self.assertEqual(
Child.check(databases=self.databases),
[
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Child'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Child,
id="models.E016",
),
],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_unique_constraint_include_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1")
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2")
class Meta:
constraints = [
models.UniqueConstraint(
fields=["id"],
include=["fk_1_id", "fk_2"],
name="name",
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_func_unique_constraint(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
constraints = [
models.UniqueConstraint(Lower("name"), name="lower_name_uq"),
]
warn = Warning(
"%s does not support unique constraints on expressions."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=Model,
id="models.W044",
)
expected = [] if connection.features.supports_expression_indexes else [warn]
self.assertEqual(Model.check(databases=self.databases), expected)
def test_func_unique_constraint_required_db_features(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
constraints = [
models.UniqueConstraint(Lower("name"), name="lower_name_unq"),
]
required_db_features = {"supports_expression_indexes"}
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_expression_custom_lookup(self):
class Model(models.Model):
height = models.IntegerField()
weight = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
models.F("height")
/ (models.F("weight__abs") + models.Value(5)),
name="name",
),
]
with register_lookup(models.IntegerField, Abs):
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(Lower("missing_field").desc(), name="name"),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_pointing_to_missing_field_nested(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(Abs(Round("missing_field")), name="name"),
]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id="models.E012",
),
],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField("self")
class Meta:
constraints = [models.UniqueConstraint(Lower("m2m"), name="name")]
self.assertEqual(
Model.check(databases=self.databases),
[
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id="models.E013",
),
],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.CharField(max_length=15)
class Bar(Foo):
class Meta:
constraints = [models.UniqueConstraint(Lower("field1"), name="name")]
self.assertEqual(
Bar.check(databases=self.databases),
[
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint="This issue may be caused by multi-table inheritance.",
obj=Bar,
id="models.E016",
),
],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_pointing_to_fk(self):
class Foo(models.Model):
id = models.CharField(primary_key=True, max_length=255)
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_1")
foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_2")
class Meta:
constraints = [
models.UniqueConstraint(
Lower("foo_1_id"),
Lower("foo_2"),
name="name",
),
]
self.assertEqual(Bar.check(databases=self.databases), [])
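# A minimal sketch (not part of the suite above; all names are
# hypothetical) of the pattern these checks guard: a partial
# UniqueConstraint whose condition references only local, non-relational
# fields passes Model.check(), while a joined reference such as
# "parent__status" in the condition raises models.E041.
#
#     class SketchOrder(models.Model):
#         status = models.CharField(max_length=10)
#         customer_id = models.IntegerField()
#
#         class Meta:
#             required_db_features = {"supports_partial_indexes"}
#             constraints = [
#                 models.UniqueConstraint(
#                     fields=["customer_id"],
#                     name="unique_active_order",
#                     condition=models.Q(status="active"),
#                 ),
#             ]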
277195ee93cb560a042b313118bdc7389130c62a5ea46c34297e4e6479bfb230
import unittest
import uuid
from django.core.checks import Error
from django.core.checks import Warning as DjangoWarning
from django.db import connection, models
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import isolate_apps, override_settings
from django.utils.functional import lazy
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django.utils.version import get_docs_version
@isolate_apps("invalid_models_tests")
class AutoFieldTests(SimpleTestCase):
def test_valid_case(self):
class Model(models.Model):
id = models.AutoField(primary_key=True)
field = Model._meta.get_field("id")
self.assertEqual(field.check(), [])
def test_primary_key(self):
# primary_key must be True. Refs #12467.
class Model(models.Model):
field = models.AutoField(primary_key=False)
# Prevent Django from autocreating `id` AutoField, which would
# result in an error, because a model must have exactly one
# AutoField.
another = models.IntegerField(primary_key=True)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"AutoFields must set primary_key=True.",
obj=field,
id="fields.E100",
),
],
)
def test_max_length_warning(self):
class Model(models.Model):
auto = models.AutoField(primary_key=True, max_length=2)
field = Model._meta.get_field("auto")
self.assertEqual(
field.check(),
[
DjangoWarning(
"'max_length' is ignored when used with %s."
% field.__class__.__name__,
hint="Remove 'max_length' from field",
obj=field,
id="fields.W122",
),
],
)
@isolate_apps("invalid_models_tests")
class BinaryFieldTests(SimpleTestCase):
def test_valid_default_value(self):
class Model(models.Model):
field1 = models.BinaryField(default=b"test")
field2 = models.BinaryField(default=None)
for field_name in ("field1", "field2"):
field = Model._meta.get_field(field_name)
self.assertEqual(field.check(), [])
def test_str_default_value(self):
class Model(models.Model):
field = models.BinaryField(default="test")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"BinaryField's default cannot be a string. Use bytes content "
"instead.",
obj=field,
id="fields.E170",
),
],
)
@isolate_apps("invalid_models_tests")
class CharFieldTests(TestCase):
def test_valid_field(self):
class Model(models.Model):
field = models.CharField(
max_length=255,
choices=[
("1", "item1"),
("2", "item2"),
],
db_index=True,
)
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
def test_missing_max_length(self):
class Model(models.Model):
field = models.CharField()
field = Model._meta.get_field("field")
expected = (
[]
if connection.features.supports_unlimited_charfield
else [
Error(
"CharFields must define a 'max_length' attribute.",
obj=field,
id="fields.E120",
),
]
)
self.assertEqual(field.check(), expected)
def test_negative_max_length(self):
class Model(models.Model):
field = models.CharField(max_length=-1)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_bad_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_str_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length="20")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_str_max_length_type(self):
class Model(models.Model):
field = models.CharField(max_length=True)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_non_iterable_choices(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=field,
id="fields.E004",
),
],
)
def test_non_iterable_choices_two_letters(self):
"""Two letters isn't a valid choice pair."""
class Model(models.Model):
field = models.CharField(max_length=10, choices=["ab"])
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_iterable_of_iterable_choices(self):
class ThingItem:
def __init__(self, value, display):
self.value = value
self.display = display
def __iter__(self):
return iter((self.value, self.display))
def __len__(self):
return 2
class Things:
def __iter__(self):
return iter((ThingItem(1, 2), ThingItem(3, 4)))
class ThingWithIterableChoices(models.Model):
thing = models.CharField(max_length=100, blank=True, choices=Things())
self.assertEqual(ThingWithIterableChoices._meta.get_field("thing").check(), [])
def test_choices_containing_non_pairs(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices=[(1, 2, 3), (1, 2, 3)])
class Model2(models.Model):
field = models.IntegerField(choices=[0])
for model in (Model, Model2):
with self.subTest(model.__name__):
field = model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual "
"value, human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_choices_containing_lazy(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=[["1", _("1")], ["2", _("2")]]
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_lazy_choices(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=lazy(lambda: [[1, "1"], [2, "2"]], tuple)()
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_choices_named_group(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[
["knights", [["L", "Lancelot"], ["G", "Galahad"]]],
["wizards", [["T", "Tim the Enchanter"]]],
["R", "Random character"],
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_choices_named_group_non_pairs(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[["knights", [["L", "Lancelot", "Du Lac"]]]],
)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_choices_named_group_bad_structure(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[
[
"knights",
[
["Noble", [["G", "Galahad"]]],
["Combative", [["L", "Lancelot"]]],
],
],
],
)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_choices_named_group_lazy(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[
[_("knights"), [["L", _("Lancelot")], ["G", _("Galahad")]]],
["R", _("Random character")],
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_choices_in_max_length(self):
class Model(models.Model):
field = models.CharField(
max_length=2,
choices=[("ABC", "Value Too Long!"), ("OK", "Good")],
)
group = models.CharField(
max_length=2,
choices=[
("Nested", [("OK", "Good"), ("Longer", "Longer")]),
("Grouped", [("Bad", "Bad")]),
],
)
for name, choice_max_length in (("field", 3), ("group", 6)):
with self.subTest(name):
field = Model._meta.get_field(name)
self.assertEqual(
field.check(),
[
Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=field,
id="fields.E009",
),
],
)
def test_bad_db_index_value(self):
class Model(models.Model):
field = models.CharField(max_length=10, db_index="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'db_index' must be None, True or False.",
obj=field,
id="fields.E006",
),
],
)
def test_bad_validators(self):
class Model(models.Model):
field = models.CharField(max_length=10, validators=[True])
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"All 'validators' must be callable.",
hint=(
"validators[0] (True) isn't a function or instance of a "
"validator class."
),
obj=field,
id="fields.E008",
),
],
)
@unittest.skipUnless(connection.vendor == "mysql", "Test valid only for MySQL")
def test_too_long_char_field_under_mysql(self):
from django.db.backends.mysql.validation import DatabaseValidation
class Model(models.Model):
field = models.CharField(unique=True, max_length=256)
field = Model._meta.get_field("field")
validator = DatabaseValidation(connection=connection)
self.assertEqual(
validator.check_field(field),
[
DjangoWarning(
"%s may not allow unique CharFields to have a max_length > "
"255." % connection.display_name,
hint=(
"See: https://docs.djangoproject.com/en/%s/ref/databases/"
"#mysql-character-fields" % get_docs_version()
),
obj=field,
id="mysql.W003",
)
],
)
def test_db_collation(self):
class Model(models.Model):
field = models.CharField(max_length=100, db_collation="anything")
field = Model._meta.get_field("field")
error = Error(
"%s does not support a database collation on CharFields."
% connection.display_name,
id="fields.E190",
obj=field,
)
expected = (
[] if connection.features.supports_collation_on_charfield else [error]
)
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_collation_required_db_features(self):
class Model(models.Model):
field = models.CharField(max_length=100, db_collation="anything")
class Meta:
required_db_features = {"supports_collation_on_charfield"}
field = Model._meta.get_field("field")
self.assertEqual(field.check(databases=self.databases), [])
@isolate_apps("invalid_models_tests")
class DateFieldTests(SimpleTestCase):
maxDiff = None
def test_auto_now_and_auto_now_add_raise_error(self):
class Model(models.Model):
            field0 = models.DateTimeField(
                auto_now=True, auto_now_add=True, default=now
            )
field1 = models.DateTimeField(
auto_now=True, auto_now_add=False, default=now
)
field2 = models.DateTimeField(
auto_now=False, auto_now_add=True, default=now
)
field3 = models.DateTimeField(
auto_now=True, auto_now_add=True, default=None
)
expected = []
checks = []
for i in range(4):
field = Model._meta.get_field("field%d" % i)
expected.append(
Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=field,
id="fields.E160",
)
)
checks.extend(field.check())
self.assertEqual(checks, expected)
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateField(default=now())
field_d = models.DateField(default=now().date())
field_now = models.DateField(default=now)
field_dt = Model._meta.get_field("field_dt")
field_d = Model._meta.get_field("field_d")
field_now = Model._meta.get_field("field_now")
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
self.assertEqual(
errors,
[
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_dt,
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_d,
id="fields.W161",
),
],
)
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
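# A minimal sketch, not part of the upstream suite, of why fields.W161
# fires above: now() is evaluated once while the class body executes, so
# every row would share that frozen timestamp, whereas the bare callable
# is re-evaluated on each save. "SketchStamp" is a hypothetical model.
@isolate_apps("invalid_models_tests")
class FixedDefaultSketchTests(SimpleTestCase):
    def test_callable_default_does_not_warn(self):
        class SketchStamp(models.Model):
            frozen = models.DateTimeField(default=now())  # warns W161
            fresh = models.DateTimeField(default=now)  # no warning
        self.assertEqual(SketchStamp._meta.get_field("fresh").check(), [])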
@isolate_apps("invalid_models_tests")
class DateTimeFieldTests(SimpleTestCase):
maxDiff = None
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateTimeField(default=now())
field_d = models.DateTimeField(default=now().date())
field_now = models.DateTimeField(default=now)
field_dt = Model._meta.get_field("field_dt")
field_d = Model._meta.get_field("field_d")
field_now = Model._meta.get_field("field_now")
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
self.assertEqual(
errors,
[
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_dt,
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_d,
id="fields.W161",
),
],
)
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps("invalid_models_tests")
class DecimalFieldTests(SimpleTestCase):
def test_required_attributes(self):
class Model(models.Model):
field = models.DecimalField()
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=field,
id="fields.E130",
),
Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=field,
id="fields.E132",
),
],
)
def test_negative_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits=-1, decimal_places=-1)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id="fields.E131",
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id="fields.E133",
),
],
)
def test_bad_values_of_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits="bad", decimal_places="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id="fields.E131",
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id="fields.E133",
),
],
)
def test_decimal_places_greater_than_max_digits(self):
class Model(models.Model):
field = models.DecimalField(max_digits=9, decimal_places=10)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=field,
id="fields.E134",
),
],
)
def test_valid_field(self):
class Model(models.Model):
field = models.DecimalField(max_digits=10, decimal_places=10)
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
@isolate_apps("invalid_models_tests")
class FileFieldTests(SimpleTestCase):
def test_valid_default_case(self):
class Model(models.Model):
field = models.FileField()
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_valid_case(self):
class Model(models.Model):
field = models.FileField(upload_to="somewhere")
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
def test_primary_key(self):
class Model(models.Model):
field = models.FileField(primary_key=False, upload_to="somewhere")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'primary_key' is not a valid argument for a FileField.",
obj=field,
id="fields.E201",
)
],
)
def test_upload_to_starts_with_slash(self):
class Model(models.Model):
field = models.FileField(upload_to="/somewhere")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"FileField's 'upload_to' argument must be a relative path, not "
"an absolute path.",
obj=field,
id="fields.E202",
hint="Remove the leading slash.",
)
],
)
def test_upload_to_callable_not_checked(self):
def callable(instance, filename):
return "/" + filename
class Model(models.Model):
field = models.FileField(upload_to=callable)
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
@isolate_apps("invalid_models_tests")
class FilePathFieldTests(SimpleTestCase):
def test_forbidden_files_and_folders(self):
class Model(models.Model):
field = models.FilePathField(allow_files=False, allow_folders=False)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' "
"set to True.",
obj=field,
id="fields.E140",
),
],
)
@isolate_apps("invalid_models_tests")
class GenericIPAddressFieldTests(SimpleTestCase):
def test_non_nullable_blank(self):
class Model(models.Model):
field = models.GenericIPAddressField(null=False, blank=True)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
(
"GenericIPAddressFields cannot have blank=True if null=False, "
"as blank values are stored as nulls."
),
obj=field,
id="fields.E150",
),
],
)
@isolate_apps("invalid_models_tests")
class ImageFieldTests(SimpleTestCase):
def test_pillow_installed(self):
try:
from PIL import Image # NOQA
except ImportError:
pillow_installed = False
else:
pillow_installed = True
class Model(models.Model):
field = models.ImageField(upload_to="somewhere")
field = Model._meta.get_field("field")
errors = field.check()
expected = (
[]
if pillow_installed
else [
Error(
"Cannot use ImageField because Pillow is not installed.",
hint=(
"Get Pillow at https://pypi.org/project/Pillow/ "
'or run command "python -m pip install Pillow".'
),
obj=field,
id="fields.E210",
),
]
)
self.assertEqual(errors, expected)
@isolate_apps("invalid_models_tests")
class IntegerFieldTests(SimpleTestCase):
def test_max_length_warning(self):
class Model(models.Model):
integer = models.IntegerField(max_length=2)
biginteger = models.BigIntegerField(max_length=2)
smallinteger = models.SmallIntegerField(max_length=2)
positiveinteger = models.PositiveIntegerField(max_length=2)
positivebiginteger = models.PositiveBigIntegerField(max_length=2)
positivesmallinteger = models.PositiveSmallIntegerField(max_length=2)
for field in Model._meta.get_fields():
if field.auto_created:
continue
with self.subTest(name=field.name):
self.assertEqual(
field.check(),
[
DjangoWarning(
"'max_length' is ignored when used with %s."
% field.__class__.__name__,
hint="Remove 'max_length' from field",
obj=field,
id="fields.W122",
)
],
)
@isolate_apps("invalid_models_tests")
class TimeFieldTests(SimpleTestCase):
maxDiff = None
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.TimeField(default=now())
field_t = models.TimeField(default=now().time())
# Timezone-aware time object (when USE_TZ=True).
field_tz = models.TimeField(default=now().timetz())
field_now = models.DateField(default=now)
names = ["field_dt", "field_t", "field_tz", "field_now"]
fields = [Model._meta.get_field(name) for name in names]
errors = []
for field in fields:
errors.extend(field.check())
self.assertEqual(
errors,
[
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=fields[0],
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=fields[1],
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint=(
"It seems you set a fixed date / time / datetime value as "
"default for this field. This may not be what you want. "
"If you want to have the current date as default, use "
"`django.utils.timezone.now`"
),
obj=fields[2],
id="fields.W161",
),
# field_now doesn't raise a warning.
],
)
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps("invalid_models_tests")
class TextFieldTests(TestCase):
@skipIfDBFeature("supports_index_on_text_field")
def test_max_length_warning(self):
class Model(models.Model):
value = models.TextField(db_index=True)
field = Model._meta.get_field("value")
field_type = field.db_type(connection)
self.assertEqual(
field.check(databases=self.databases),
[
DjangoWarning(
"%s does not support a database index on %s columns."
% (connection.display_name, field_type),
hint=(
"An index won't be created. Silence this warning if you "
"don't care about it."
),
obj=field,
id="fields.W162",
)
],
)
def test_db_collation(self):
class Model(models.Model):
field = models.TextField(db_collation="anything")
field = Model._meta.get_field("field")
error = Error(
"%s does not support a database collation on TextFields."
% connection.display_name,
id="fields.E190",
obj=field,
)
expected = (
[] if connection.features.supports_collation_on_textfield else [error]
)
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_collation_required_db_features(self):
class Model(models.Model):
field = models.TextField(db_collation="anything")
class Meta:
required_db_features = {"supports_collation_on_textfield"}
field = Model._meta.get_field("field")
self.assertEqual(field.check(databases=self.databases), [])
@isolate_apps("invalid_models_tests")
class UUIDFieldTests(TestCase):
def test_choices_named_group(self):
class Model(models.Model):
field = models.UUIDField(
choices=[
[
"knights",
[
[
uuid.UUID("5c859437-d061-4847-b3f7-e6b78852f8c8"),
"Lancelot",
],
[
uuid.UUID("c7853ec1-2ea3-4359-b02d-b54e8f1bcee2"),
"Galahad",
],
],
],
[uuid.UUID("25d405be-4895-4d50-9b2e-d6695359ce47"), "Other"],
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
@isolate_apps("invalid_models_tests")
@skipUnlessDBFeature("supports_json_field")
class JSONFieldTests(TestCase):
def test_invalid_default(self):
class Model(models.Model):
field = models.JSONField(default={})
self.assertEqual(
Model._meta.get_field("field").check(),
[
DjangoWarning(
msg=(
"JSONField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint=("Use a callable instead, e.g., use `dict` instead of `{}`."),
obj=Model._meta.get_field("field"),
id="fields.E010",
)
],
)
def test_valid_default(self):
class Model(models.Model):
field = models.JSONField(default=dict)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_valid_default_none(self):
class Model(models.Model):
field = models.JSONField(default=None)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_valid_callable_default(self):
def callable_default():
return {"it": "works"}
class Model(models.Model):
field = models.JSONField(default=callable_default)
self.assertEqual(Model._meta.get_field("field").check(), [])
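# A minimal sketch, not part of the upstream suite, of the sharing problem
# behind the warning above: a literal {} default is a single dict object
# handed to every instance, while the dict callable builds a fresh mapping
# per instance. "SketchDoc" is a hypothetical model.
@isolate_apps("invalid_models_tests")
class MutableJSONDefaultSketchTests(SimpleTestCase):
    def test_literal_default_is_shared(self):
        class SketchDoc(models.Model):
            data = models.JSONField(default={})
        a, b = SketchDoc(), SketchDoc()
        self.assertIs(a.data, b.data)  # both point at the same dict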
@isolate_apps("invalid_models_tests")
class DbCommentTests(TestCase):
def test_db_comment(self):
class Model(models.Model):
field = models.IntegerField(db_comment="Column comment")
errors = Model._meta.get_field("field").check(databases=self.databases)
expected = (
[]
if connection.features.supports_comments
else [
DjangoWarning(
f"{connection.display_name} does not support comments on columns "
f"(db_comment).",
obj=Model._meta.get_field("field"),
id="fields.W163",
),
]
)
self.assertEqual(errors, expected)
def test_db_comment_required_db_features(self):
class Model(models.Model):
field = models.IntegerField(db_comment="Column comment")
class Meta:
required_db_features = {"supports_comments"}
errors = Model._meta.get_field("field").check(databases=self.databases)
self.assertEqual(errors, [])
6219ae69c89a322799d9a9bbeb2aefeb1e64c6c0782844c14d3cca58d52cd369
import operator
import uuid
from unittest import mock
from django import forms
from django.core import serializers
from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
from django.db import (
DataError,
IntegrityError,
NotSupportedError,
OperationalError,
connection,
models,
)
from django.db.models import (
Count,
ExpressionWrapper,
F,
IntegerField,
JSONField,
OuterRef,
Q,
Subquery,
Transform,
Value,
)
from django.db.models.expressions import RawSQL
from django.db.models.fields.json import (
KT,
KeyTextTransform,
KeyTransform,
KeyTransformFactory,
KeyTransformTextLookupMixin,
)
from django.db.models.functions import Cast
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from django.utils.deprecation import RemovedInDjango51Warning
from .models import CustomJSONDecoder, JSONModel, NullableJSONModel, RelatedJSONModel
@skipUnlessDBFeature("supports_json_field")
class JSONFieldTests(TestCase):
def test_invalid_value(self):
msg = "is not JSON serializable"
with self.assertRaisesMessage(TypeError, msg):
NullableJSONModel.objects.create(
value={
"uuid": uuid.UUID("d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475"),
}
)
def test_custom_encoder_decoder(self):
value = {"uuid": uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")}
obj = NullableJSONModel(value_custom=value)
obj.clean_fields()
obj.save()
obj.refresh_from_db()
self.assertEqual(obj.value_custom, value)
def test_db_check_constraints(self):
value = "{@!invalid json value 123 $!@#"
with mock.patch.object(DjangoJSONEncoder, "encode", return_value=value):
with self.assertRaises((IntegrityError, DataError, OperationalError)):
NullableJSONModel.objects.create(value_custom=value)
class TestMethods(SimpleTestCase):
def test_deconstruct(self):
field = models.JSONField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.JSONField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_deconstruct_custom_encoder_decoder(self):
field = models.JSONField(encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(kwargs["encoder"], DjangoJSONEncoder)
self.assertEqual(kwargs["decoder"], CustomJSONDecoder)
def test_get_transforms(self):
@models.JSONField.register_lookup
class MyTransform(Transform):
lookup_name = "my_transform"
field = models.JSONField()
transform = field.get_transform("my_transform")
self.assertIs(transform, MyTransform)
models.JSONField._unregister_lookup(MyTransform)
transform = field.get_transform("my_transform")
self.assertIsInstance(transform, KeyTransformFactory)
def test_key_transform_text_lookup_mixin_non_key_transform(self):
transform = Transform("test")
msg = (
"Transform should be an instance of KeyTransform in order to use "
"this lookup."
)
with self.assertRaisesMessage(TypeError, msg):
KeyTransformTextLookupMixin(transform)
class TestValidation(SimpleTestCase):
def test_invalid_encoder(self):
msg = "The encoder parameter must be a callable object."
with self.assertRaisesMessage(ValueError, msg):
models.JSONField(encoder=DjangoJSONEncoder())
def test_invalid_decoder(self):
msg = "The decoder parameter must be a callable object."
with self.assertRaisesMessage(ValueError, msg):
models.JSONField(decoder=CustomJSONDecoder())
def test_validation_error(self):
field = models.JSONField()
msg = "Value must be valid JSON."
value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")
with self.assertRaisesMessage(ValidationError, msg):
field.clean({"uuid": value}, None)
def test_custom_encoder(self):
field = models.JSONField(encoder=DjangoJSONEncoder)
value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")
field.clean({"uuid": value}, None)
class TestFormField(SimpleTestCase):
def test_formfield(self):
model_field = models.JSONField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, forms.JSONField)
def test_formfield_custom_encoder_decoder(self):
model_field = models.JSONField(
encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder
)
form_field = model_field.formfield()
self.assertIs(form_field.encoder, DjangoJSONEncoder)
self.assertIs(form_field.decoder, CustomJSONDecoder)
class TestSerialization(SimpleTestCase):
test_data = (
'[{"fields": {"value": %s}, "model": "model_fields.jsonmodel", "pk": null}]'
)
test_values = (
# (Python value, serialized value),
({"a": "b", "c": None}, '{"a": "b", "c": null}'),
("abc", '"abc"'),
('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'),
)
def test_dumping(self):
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = JSONModel(value=value)
data = serializers.serialize("json", [instance])
self.assertJSONEqual(data, self.test_data % serialized)
def test_loading(self):
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = list(
serializers.deserialize("json", self.test_data % serialized)
)[0].object
self.assertEqual(instance.value, value)
def test_xml_serialization(self):
test_xml_data = (
'<django-objects version="1.0">'
'<object model="model_fields.nullablejsonmodel">'
'<field name="value" type="JSONField">%s'
"</field></object></django-objects>"
)
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = NullableJSONModel(value=value)
data = serializers.serialize("xml", [instance], fields=["value"])
self.assertXMLEqual(data, test_xml_data % serialized)
new_instance = list(serializers.deserialize("xml", data))[0].object
self.assertEqual(new_instance.value, instance.value)
@skipUnlessDBFeature("supports_json_field")
class TestSaveLoad(TestCase):
def test_null(self):
obj = NullableJSONModel(value=None)
obj.save()
obj.refresh_from_db()
self.assertIsNone(obj.value)
def test_ambiguous_str_value_deprecation(self):
msg = (
"Providing an encoded JSON string via Value() is deprecated. Use Value([], "
"output_field=JSONField()) instead."
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
obj = NullableJSONModel.objects.create(value=Value("[]"))
obj.refresh_from_db()
self.assertEqual(obj.value, [])
@skipUnlessDBFeature("supports_primitives_in_json_field")
def test_value_str_primitives_deprecation(self):
msg = (
"Providing an encoded JSON string via Value() is deprecated. Use "
"Value(None, output_field=JSONField()) instead."
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
obj = NullableJSONModel.objects.create(value=Value("null"))
obj.refresh_from_db()
self.assertIsNone(obj.value)
obj = NullableJSONModel.objects.create(value=Value("invalid-json"))
obj.refresh_from_db()
self.assertEqual(obj.value, "invalid-json")
@skipUnlessDBFeature("supports_primitives_in_json_field")
def test_json_null_different_from_sql_null(self):
json_null = NullableJSONModel.objects.create(value=Value(None, JSONField()))
NullableJSONModel.objects.update(value=Value(None, JSONField()))
json_null.refresh_from_db()
sql_null = NullableJSONModel.objects.create(value=None)
sql_null.refresh_from_db()
# 'null' is not equal to NULL in the database.
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value=Value(None, JSONField())),
[json_null],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value=None),
[json_null],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__isnull=True),
[sql_null],
)
# 'null' is equal to NULL in Python (None).
self.assertEqual(json_null.value, sql_null.value)
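    # Illustrative note, not asserted above: the first row stores the JSON
    # literal null while the second stores SQL NULL, which is why
    # Value(None, JSONField()) and value__isnull=True match different rows
    # even though both attributes deserialize to Python None.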
@skipUnlessDBFeature("supports_primitives_in_json_field")
def test_primitives(self):
values = [
True,
1,
1.45,
"String",
"",
]
for value in values:
with self.subTest(value=value):
obj = JSONModel(value=value)
obj.save()
obj.refresh_from_db()
self.assertEqual(obj.value, value)
def test_dict(self):
values = [
{},
{"name": "John", "age": 20, "height": 180.3},
{"a": True, "b": {"b1": False, "b2": None}},
]
for value in values:
with self.subTest(value=value):
obj = JSONModel.objects.create(value=value)
obj.refresh_from_db()
self.assertEqual(obj.value, value)
def test_list(self):
values = [
[],
["John", 20, 180.3],
[True, [False, None]],
]
for value in values:
with self.subTest(value=value):
obj = JSONModel.objects.create(value=value)
obj.refresh_from_db()
self.assertEqual(obj.value, value)
def test_realistic_object(self):
value = {
"name": "John",
"age": 20,
"pets": [
{"name": "Kit", "type": "cat", "age": 2},
{"name": "Max", "type": "dog", "age": 1},
],
"courses": [
["A1", "A2", "A3"],
["B1", "B2"],
["C1"],
],
}
obj = JSONModel.objects.create(value=value)
obj.refresh_from_db()
self.assertEqual(obj.value, value)
@skipUnlessDBFeature("supports_json_field")
class TestQuerying(TestCase):
@classmethod
def setUpTestData(cls):
cls.primitives = [True, False, "yes", 7, 9.6]
values = [
None,
[],
{},
{"a": "b", "c": 14},
{
"a": "b",
"c": 14,
"d": ["e", {"f": "g"}],
"h": True,
"i": False,
"j": None,
"k": {"l": "m"},
"n": [None, True, False],
"o": '"quoted"',
"p": 4.2,
"r": {"s": True, "t": False},
},
[1, [2]],
{"k": True, "l": False, "foo": "bax"},
{
"foo": "bar",
"baz": {"a": "b", "c": "d"},
"bar": ["foo", "bar"],
"bax": {"foo": "bar"},
},
]
cls.objs = [NullableJSONModel.objects.create(value=value) for value in values]
if connection.features.supports_primitives_in_json_field:
cls.objs.extend(
[
NullableJSONModel.objects.create(value=value)
for value in cls.primitives
]
)
cls.raw_sql = "%s::jsonb" if connection.vendor == "postgresql" else "%s"
def test_exact(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__exact={}),
[self.objs[2]],
)
def test_exact_complex(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__exact={"a": "b", "c": 14}),
[self.objs[3]],
)
def test_icontains(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__icontains="BaX"),
self.objs[6:8],
)
def test_isnull(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__isnull=True),
[self.objs[0]],
)
def test_ordering_by_transform(self):
mariadb = connection.vendor == "mysql" and connection.mysql_is_mariadb
values = [
{"ord": 93, "name": "bar"},
{"ord": 22.1, "name": "foo"},
{"ord": -1, "name": "baz"},
{"ord": 21.931902, "name": "spam"},
{"ord": -100291029, "name": "eggs"},
]
for field_name in ["value", "value_custom"]:
with self.subTest(field=field_name):
objs = [
NullableJSONModel.objects.create(**{field_name: value})
for value in values
]
query = NullableJSONModel.objects.filter(
**{"%s__name__isnull" % field_name: False},
).order_by("%s__ord" % field_name)
expected = [objs[4], objs[2], objs[3], objs[1], objs[0]]
if mariadb or connection.vendor == "oracle":
# MariaDB and Oracle return JSON values as strings.
expected = [objs[2], objs[4], objs[3], objs[1], objs[0]]
self.assertSequenceEqual(query, expected)
def test_ordering_grouping_by_key_transform(self):
base_qs = NullableJSONModel.objects.filter(value__d__0__isnull=False)
for qs in (
base_qs.order_by("value__d__0"),
base_qs.annotate(
key=KeyTransform("0", KeyTransform("d", "value"))
).order_by("key"),
):
self.assertSequenceEqual(qs, [self.objs[4]])
none_val = "" if connection.features.interprets_empty_strings_as_nulls else None
qs = NullableJSONModel.objects.filter(value__isnull=False)
self.assertQuerySetEqual(
qs.filter(value__isnull=False)
.annotate(key=KT("value__d__1__f"))
.values("key")
.annotate(count=Count("key"))
.order_by("count"),
[(none_val, 0), ("g", 1)],
operator.itemgetter("key", "count"),
)
def test_ordering_grouping_by_count(self):
qs = (
NullableJSONModel.objects.filter(
value__isnull=False,
)
.values("value__d__0")
.annotate(count=Count("value__d__0"))
.order_by("count")
)
self.assertQuerySetEqual(qs, [0, 1], operator.itemgetter("count"))
def test_order_grouping_custom_decoder(self):
NullableJSONModel.objects.create(value_custom={"a": "b"})
qs = NullableJSONModel.objects.filter(value_custom__isnull=False)
self.assertSequenceEqual(
qs.values(
"value_custom__a",
)
.annotate(
count=Count("id"),
)
.order_by("value_custom__a"),
[{"value_custom__a": "b", "count": 1}],
)
def test_key_transform_raw_expression(self):
expr = RawSQL(self.raw_sql, ['{"x": "bar"}'])
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__foo=KeyTransform("x", expr)),
[self.objs[7]],
)
def test_nested_key_transform_raw_expression(self):
expr = RawSQL(self.raw_sql, ['{"x": {"y": "bar"}}'])
self.assertSequenceEqual(
NullableJSONModel.objects.filter(
value__foo=KeyTransform("y", KeyTransform("x", expr))
),
[self.objs[7]],
)
def test_key_transform_expression(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=KeyTransform("d", "value"),
chain=KeyTransform("0", "key"),
expr=KeyTransform("0", Cast("key", models.JSONField())),
)
.filter(chain=F("expr")),
[self.objs[4]],
)
def test_key_transform_annotation_expression(self):
obj = NullableJSONModel.objects.create(value={"d": ["e", "e"]})
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=F("value__d"),
chain=F("key__0"),
expr=Cast("key", models.JSONField()),
)
.filter(chain=F("expr__1")),
[obj],
)
def test_nested_key_transform_expression(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=KeyTransform("d", "value"),
chain=KeyTransform("f", KeyTransform("1", "key")),
expr=KeyTransform(
"f", KeyTransform("1", Cast("key", models.JSONField()))
),
)
.filter(chain=F("expr")),
[self.objs[4]],
)
def test_nested_key_transform_annotation_expression(self):
obj = NullableJSONModel.objects.create(
value={"d": ["e", {"f": "g"}, {"f": "g"}]},
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=F("value__d"),
chain=F("key__1__f"),
expr=Cast("key", models.JSONField()),
)
.filter(chain=F("expr__2__f")),
[obj],
)
def test_nested_key_transform_on_subquery(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
subquery_value=Subquery(
NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value")
),
key=KeyTransform("d", "subquery_value"),
chain=KeyTransform("f", KeyTransform("1", "key")),
)
.filter(chain="g"),
[self.objs[4]],
)
def test_key_text_transform_char_lookup(self):
qs = NullableJSONModel.objects.annotate(
char_value=KeyTextTransform("foo", "value"),
).filter(char_value__startswith="bar")
self.assertSequenceEqual(qs, [self.objs[7]])
qs = NullableJSONModel.objects.annotate(
char_value=KeyTextTransform(1, KeyTextTransform("bar", "value")),
).filter(char_value__startswith="bar")
self.assertSequenceEqual(qs, [self.objs[7]])
def test_expression_wrapper_key_transform(self):
self.assertCountEqual(
NullableJSONModel.objects.annotate(
expr=ExpressionWrapper(
KeyTransform("c", "value"),
output_field=IntegerField(),
),
).filter(expr__isnull=False),
self.objs[3:5],
)
def test_has_key(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__has_key="a"),
[self.objs[3], self.objs[4]],
)
def test_has_key_null_value(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__has_key="j"),
[self.objs[4]],
)
def test_has_key_deep(self):
tests = [
(Q(value__baz__has_key="a"), self.objs[7]),
(
Q(value__has_key=KeyTransform("a", KeyTransform("baz", "value"))),
self.objs[7],
),
(Q(value__has_key=F("value__baz__a")), self.objs[7]),
(
Q(value__has_key=KeyTransform("c", KeyTransform("baz", "value"))),
self.objs[7],
),
(Q(value__has_key=F("value__baz__c")), self.objs[7]),
(Q(value__d__1__has_key="f"), self.objs[4]),
(
Q(
value__has_key=KeyTransform(
"f", KeyTransform("1", KeyTransform("d", "value"))
)
),
self.objs[4],
),
(Q(value__has_key=F("value__d__1__f")), self.objs[4]),
]
for condition, expected in tests:
with self.subTest(condition=condition):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition),
[expected],
)
def test_has_key_list(self):
obj = NullableJSONModel.objects.create(value=[{"a": 1}, {"b": "x"}])
tests = [
Q(value__1__has_key="b"),
Q(value__has_key=KeyTransform("b", KeyTransform(1, "value"))),
Q(value__has_key=KeyTransform("b", KeyTransform("1", "value"))),
Q(value__has_key=F("value__1__b")),
]
for condition in tests:
with self.subTest(condition=condition):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition),
[obj],
)
def test_has_keys(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__has_keys=["a", "c", "h"]),
[self.objs[4]],
)
def test_has_any_keys(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__has_any_keys=["c", "l"]),
[self.objs[3], self.objs[4], self.objs[6]],
)
def test_has_key_number(self):
obj = NullableJSONModel.objects.create(
value={
"123": "value",
"nested": {"456": "bar", "lorem": "abc", "999": True},
"array": [{"789": "baz", "777": "def", "ipsum": 200}],
"000": "val",
}
)
tests = [
Q(value__has_key="123"),
Q(value__nested__has_key="456"),
Q(value__array__0__has_key="789"),
Q(value__has_keys=["nested", "123", "array", "000"]),
Q(value__nested__has_keys=["lorem", "999", "456"]),
Q(value__array__0__has_keys=["789", "ipsum", "777"]),
Q(value__has_any_keys=["000", "nonexistent"]),
Q(value__nested__has_any_keys=["999", "nonexistent"]),
Q(value__array__0__has_any_keys=["777", "nonexistent"]),
]
for condition in tests:
with self.subTest(condition=condition):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition),
[obj],
)
@skipUnlessDBFeature("supports_json_field_contains")
def test_contains(self):
tests = [
({}, self.objs[2:5] + self.objs[6:8]),
({"baz": {"a": "b", "c": "d"}}, [self.objs[7]]),
({"baz": {"a": "b"}}, [self.objs[7]]),
({"baz": {"c": "d"}}, [self.objs[7]]),
({"k": True, "l": False}, [self.objs[6]]),
({"d": ["e", {"f": "g"}]}, [self.objs[4]]),
({"d": ["e"]}, [self.objs[4]]),
({"d": [{"f": "g"}]}, [self.objs[4]]),
([1, [2]], [self.objs[5]]),
([1], [self.objs[5]]),
([[2]], [self.objs[5]]),
({"n": [None, True, False]}, [self.objs[4]]),
({"j": None}, [self.objs[4]]),
]
for value, expected in tests:
with self.subTest(value=value):
qs = NullableJSONModel.objects.filter(value__contains=value)
self.assertCountEqual(qs, expected)
@skipIfDBFeature("supports_json_field_contains")
def test_contains_unsupported(self):
msg = "contains lookup is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
NullableJSONModel.objects.filter(
value__contains={"baz": {"a": "b", "c": "d"}},
).get()
@skipUnlessDBFeature(
"supports_primitives_in_json_field",
"supports_json_field_contains",
)
def test_contains_primitives(self):
for value in self.primitives:
with self.subTest(value=value):
qs = NullableJSONModel.objects.filter(value__contains=value)
self.assertIs(qs.exists(), True)
@skipUnlessDBFeature("supports_json_field_contains")
def test_contained_by(self):
qs = NullableJSONModel.objects.filter(
value__contained_by={"a": "b", "c": 14, "h": True}
)
self.assertCountEqual(qs, self.objs[2:4])
@skipIfDBFeature("supports_json_field_contains")
def test_contained_by_unsupported(self):
msg = "contained_by lookup is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
NullableJSONModel.objects.filter(value__contained_by={"a": "b"}).get()
def test_deep_values(self):
qs = NullableJSONModel.objects.values_list("value__k__l").order_by("pk")
expected_objs = [(None,)] * len(self.objs)
expected_objs[4] = ("m",)
self.assertSequenceEqual(qs, expected_objs)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_deep_distinct(self):
query = NullableJSONModel.objects.distinct("value__k__l").values_list(
"value__k__l"
)
self.assertSequenceEqual(query, [("m",), (None,)])
def test_isnull_key(self):
# key__isnull=False works the same as has_key='key'.
self.assertCountEqual(
NullableJSONModel.objects.filter(value__a__isnull=True),
self.objs[:3] + self.objs[5:],
)
self.assertCountEqual(
NullableJSONModel.objects.filter(value__j__isnull=True),
self.objs[:4] + self.objs[5:],
)
self.assertCountEqual(
NullableJSONModel.objects.filter(value__a__isnull=False),
[self.objs[3], self.objs[4]],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__j__isnull=False),
[self.objs[4]],
)
def test_isnull_key_or_none(self):
obj = NullableJSONModel.objects.create(value={"a": None})
self.assertCountEqual(
NullableJSONModel.objects.filter(
Q(value__a__isnull=True) | Q(value__a=None)
),
self.objs[:3] + self.objs[5:] + [obj],
)
def test_none_key(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__j=None),
[self.objs[4]],
)
def test_none_key_exclude(self):
obj = NullableJSONModel.objects.create(value={"j": 1})
        if connection.vendor == "oracle":
            # Oracle itself can filter JSON objects with NULL-valued keys,
            # but Django's current Oracle backend doesn't support it.
self.assertSequenceEqual(
NullableJSONModel.objects.exclude(value__j=None),
self.objs[1:4] + self.objs[5:] + [obj],
)
else:
self.assertSequenceEqual(
NullableJSONModel.objects.exclude(value__j=None), [obj]
)
def test_shallow_list_lookup(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__0=1),
[self.objs[5]],
)
def test_shallow_obj_lookup(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__a="b"),
[self.objs[3], self.objs[4]],
)
def test_obj_subquery_lookup(self):
qs = NullableJSONModel.objects.annotate(
field=Subquery(
NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value")
),
).filter(field__a="b")
self.assertCountEqual(qs, [self.objs[3], self.objs[4]])
def test_deep_lookup_objs(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__k__l="m"),
[self.objs[4]],
)
def test_shallow_lookup_obj_target(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__k={"l": "m"}),
[self.objs[4]],
)
def test_deep_lookup_array(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__1__0=2),
[self.objs[5]],
)
def test_deep_lookup_mixed(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__1__f="g"),
[self.objs[4]],
)
def test_deep_lookup_transform(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__c__gt=2),
[self.objs[3], self.objs[4]],
)
self.assertCountEqual(
NullableJSONModel.objects.filter(value__c__gt=2.33),
[self.objs[3], self.objs[4]],
)
self.assertIs(NullableJSONModel.objects.filter(value__c__lt=5).exists(), False)
def test_lookup_exclude(self):
tests = [
(Q(value__a="b"), [self.objs[0]]),
(Q(value__foo="bax"), [self.objs[0], self.objs[7]]),
]
for condition, expected in tests:
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition),
expected,
)
self.assertCountEqual(
NullableJSONModel.objects.filter(~condition),
expected,
)
def test_lookup_exclude_nonexistent_key(self):
# Values without the key are ignored.
condition = Q(value__foo="bax")
objs_with_value = [self.objs[6]]
objs_with_different_value = [self.objs[0], self.objs[7]]
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition),
objs_with_different_value,
)
self.assertSequenceEqual(
NullableJSONModel.objects.exclude(~condition),
objs_with_value,
)
self.assertCountEqual(
NullableJSONModel.objects.filter(condition | ~condition),
objs_with_value + objs_with_different_value,
)
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition & ~condition),
objs_with_value + objs_with_different_value,
)
# Add the __isnull lookup to get an exhaustive set.
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition & Q(value__foo__isnull=False)),
self.objs[0:6] + self.objs[7:],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition & Q(value__foo__isnull=False)),
objs_with_value,
)
def test_usage_in_subquery(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(
id__in=NullableJSONModel.objects.filter(value__c=14),
),
self.objs[3:5],
)
@skipUnlessDBFeature("supports_json_field_contains")
def test_array_key_contains(self):
tests = [
([], [self.objs[7]]),
("bar", [self.objs[7]]),
(["bar"], [self.objs[7]]),
("ar", []),
]
for value, expected in tests:
with self.subTest(value=value):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__bar__contains=value),
expected,
)
def test_key_iexact(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iexact="BaR").exists(), True
)
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iexact='"BaR"').exists(), False
)
def test_key_in(self):
tests = [
("value__c__in", [14], self.objs[3:5]),
("value__c__in", [14, 15], self.objs[3:5]),
("value__0__in", [1], [self.objs[5]]),
("value__0__in", [1, 3], [self.objs[5]]),
("value__foo__in", ["bar"], [self.objs[7]]),
(
"value__foo__in",
[KeyTransform("foo", KeyTransform("bax", "value"))],
[self.objs[7]],
),
("value__foo__in", [F("value__bax__foo")], [self.objs[7]]),
(
"value__foo__in",
[KeyTransform("foo", KeyTransform("bax", "value")), "baz"],
[self.objs[7]],
),
("value__foo__in", [F("value__bax__foo"), "baz"], [self.objs[7]]),
("value__foo__in", ["bar", "baz"], [self.objs[7]]),
("value__bar__in", [["foo", "bar"]], [self.objs[7]]),
("value__bar__in", [["foo", "bar"], ["a"]], [self.objs[7]]),
("value__bax__in", [{"foo": "bar"}, {"a": "b"}], [self.objs[7]]),
("value__h__in", [True, "foo"], [self.objs[4]]),
("value__i__in", [False, "foo"], [self.objs[4]]),
]
for lookup, value, expected in tests:
with self.subTest(lookup=lookup, value=value):
self.assertCountEqual(
NullableJSONModel.objects.filter(**{lookup: value}),
expected,
)
def test_key_values(self):
qs = NullableJSONModel.objects.filter(value__h=True)
tests = [
("value__a", "b"),
("value__c", 14),
("value__d", ["e", {"f": "g"}]),
("value__h", True),
("value__i", False),
("value__j", None),
("value__k", {"l": "m"}),
("value__n", [None, True, False]),
("value__p", 4.2),
("value__r", {"s": True, "t": False}),
]
for lookup, expected in tests:
with self.subTest(lookup=lookup):
self.assertEqual(qs.values_list(lookup, flat=True).get(), expected)
def test_key_values_boolean(self):
qs = NullableJSONModel.objects.filter(value__h=True, value__i=False)
tests = [
("value__h", True),
("value__i", False),
]
for lookup, expected in tests:
with self.subTest(lookup=lookup):
self.assertIs(qs.values_list(lookup, flat=True).get(), expected)
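    # assertIs() above guards against backends that might decode JSON true
    # and false as the integers 1 and 0, which would still compare equal
    # under assertEqual().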
@skipUnlessDBFeature("supports_json_field_contains")
def test_key_contains(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__contains="ar").exists(), False
)
self.assertIs(
NullableJSONModel.objects.filter(value__foo__contains="bar").exists(), True
)
def test_key_icontains(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__icontains="Ar").exists(), True
)
def test_key_startswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__startswith="b").exists(), True
)
def test_key_istartswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__istartswith="B").exists(), True
)
def test_key_endswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__endswith="r").exists(), True
)
def test_key_iendswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iendswith="R").exists(), True
)
def test_key_regex(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__regex=r"^bar$").exists(), True
)
def test_key_iregex(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iregex=r"^bAr$").exists(), True
)
def test_key_quoted_string(self):
self.assertEqual(
NullableJSONModel.objects.filter(value__o='"quoted"').get(),
self.objs[4],
)
@skipUnlessDBFeature("has_json_operators")
def test_key_sql_injection(self):
with CaptureQueriesContext(connection) as queries:
self.assertIs(
NullableJSONModel.objects.filter(
**{
"""value__test' = '"a"') OR 1 = 1 OR ('d""": "x",
}
).exists(),
False,
)
self.assertIn(
"""."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"'""",
queries[0]["sql"],
)
@skipIfDBFeature("has_json_operators")
def test_key_sql_injection_escape(self):
query = str(
JSONModel.objects.filter(
**{
"""value__test") = '"a"' OR 1 = 1 OR ("d""": "x",
}
).query
)
self.assertIn('"test\\"', query)
self.assertIn('\\"d', query)
def test_key_escape(self):
obj = NullableJSONModel.objects.create(value={"%total": 10})
self.assertEqual(
NullableJSONModel.objects.filter(**{"value__%total": 10}).get(), obj
)
def test_none_key_and_exact_lookup(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__a="b", value__j=None),
[self.objs[4]],
)
def test_lookups_with_key_transform(self):
tests = (
("value__baz__has_key", "c"),
("value__baz__has_keys", ["a", "c"]),
("value__baz__has_any_keys", ["a", "x"]),
("value__has_key", KeyTextTransform("foo", "value")),
)
for lookup, value in tests:
with self.subTest(lookup=lookup):
self.assertIs(
NullableJSONModel.objects.filter(
**{lookup: value},
).exists(),
True,
)
@skipUnlessDBFeature("supports_json_field_contains")
def test_contains_contained_by_with_key_transform(self):
tests = [
("value__d__contains", "e"),
("value__d__contains", [{"f": "g"}]),
("value__contains", KeyTransform("bax", "value")),
("value__contains", F("value__bax")),
("value__baz__contains", {"a": "b"}),
("value__baz__contained_by", {"a": "b", "c": "d", "e": "f"}),
(
"value__contained_by",
KeyTransform(
"x",
RawSQL(
self.raw_sql,
['{"x": {"a": "b", "c": 1, "d": "e"}}'],
),
),
),
]
# For databases where {'f': 'g'} (without surrounding []) matches
# [{'f': 'g'}].
if not connection.features.json_key_contains_list_matching_requires_list:
tests.append(("value__d__contains", {"f": "g"}))
for lookup, value in tests:
with self.subTest(lookup=lookup, value=value):
self.assertIs(
NullableJSONModel.objects.filter(
**{lookup: value},
).exists(),
True,
)
def test_join_key_transform_annotation_expression(self):
related_obj = RelatedJSONModel.objects.create(
value={"d": ["f", "e"]},
json_model=self.objs[4],
)
RelatedJSONModel.objects.create(
value={"d": ["e", "f"]},
json_model=self.objs[4],
)
self.assertSequenceEqual(
RelatedJSONModel.objects.annotate(
key=F("value__d"),
related_key=F("json_model__value__d"),
chain=F("key__1"),
expr=Cast("key", models.JSONField()),
).filter(chain=F("related_key__0")),
[related_obj],
)
def test_key_text_transform_from_lookup(self):
qs = NullableJSONModel.objects.annotate(b=KT("value__bax__foo")).filter(
b__contains="ar",
)
self.assertSequenceEqual(qs, [self.objs[7]])
qs = NullableJSONModel.objects.annotate(c=KT("value__o")).filter(
c__contains="uot",
)
self.assertSequenceEqual(qs, [self.objs[4]])
def test_key_text_transform_from_lookup_invalid(self):
msg = "Lookup must contain key or index transforms."
with self.assertRaisesMessage(ValueError, msg):
KT("value")
with self.assertRaisesMessage(ValueError, msg):
KT("")
|
a715379ae186390e949ecdb0d280da2fd61a886865be8642e1bac927ff72b80c | import os
import pickle
import sys
import tempfile
import unittest
from pathlib import Path
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File, temp
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import TemporaryUploadedFile
from django.db import IntegrityError, models
from django.test import TestCase, override_settings
from django.test.utils import isolate_apps
from .models import Document
class FileFieldTests(TestCase):
def test_clearable(self):
"""
FileField.save_form_data() will clear its instance attribute value if
passed False.
"""
d = Document(myfile="something.txt")
self.assertEqual(d.myfile, "something.txt")
field = d._meta.get_field("myfile")
field.save_form_data(d, False)
self.assertEqual(d.myfile, "")
def test_unchanged(self):
"""
FileField.save_form_data() considers None to mean "no change" rather
than "clear".
"""
d = Document(myfile="something.txt")
self.assertEqual(d.myfile, "something.txt")
field = d._meta.get_field("myfile")
field.save_form_data(d, None)
self.assertEqual(d.myfile, "something.txt")
def test_changed(self):
"""
FileField.save_form_data(), if passed a truthy value, updates its
instance attribute.
"""
d = Document(myfile="something.txt")
self.assertEqual(d.myfile, "something.txt")
field = d._meta.get_field("myfile")
field.save_form_data(d, "else.txt")
self.assertEqual(d.myfile, "else.txt")
def test_delete_when_file_unset(self):
"""
Calling delete on an unset FileField should not call the file deletion
process, but fail silently (#20660).
"""
d = Document()
d.myfile.delete()
def test_refresh_from_db(self):
d = Document.objects.create(myfile="something.txt")
d.refresh_from_db()
self.assertIs(d.myfile.instance, d)
@unittest.skipIf(sys.platform == "win32", "Crashes with OSError on Windows.")
def test_save_without_name(self):
with tempfile.NamedTemporaryFile(suffix=".txt") as tmp:
document = Document.objects.create(myfile="something.txt")
document.myfile = File(tmp)
msg = f"Detected path traversal attempt in '{tmp.name}'"
with self.assertRaisesMessage(SuspiciousFileOperation, msg):
document.save()
def test_defer(self):
Document.objects.create(myfile="something.txt")
self.assertEqual(Document.objects.defer("myfile")[0].myfile, "something.txt")
def test_unique_when_same_filename(self):
"""
A FileField with unique=True shouldn't allow two instances with the
same name to be saved.
"""
Document.objects.create(myfile="something.txt")
with self.assertRaises(IntegrityError):
Document.objects.create(myfile="something.txt")
@unittest.skipIf(
sys.platform == "win32", "Windows doesn't support moving open files."
)
# The file's source and destination must be on the same filesystem.
@override_settings(MEDIA_ROOT=temp.gettempdir())
def test_move_temporary_file(self):
"""
The temporary uploaded file is moved rather than copied to the
destination.
"""
with TemporaryUploadedFile(
"something.txt", "text/plain", 0, "UTF-8"
) as tmp_file:
tmp_file_path = tmp_file.temporary_file_path()
Document.objects.create(myfile=tmp_file)
self.assertFalse(
os.path.exists(tmp_file_path), "Temporary file still exists"
)
def test_open_returns_self(self):
"""
        FileField.open() returns self so it can be used as a context manager.
"""
d = Document.objects.create(myfile="something.txt")
# Replace the FileField's file with an in-memory ContentFile, so that
# open() doesn't write to disk.
d.myfile.file = ContentFile(b"", name="bla")
self.assertEqual(d.myfile, d.myfile.open())
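    # A usage sketch of the context-manager behavior verified above, assuming
    # a saved Document instance whose file actually exists in storage:
    #
    #   with document.myfile.open("rb") as f:
    #       data = f.read()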
def test_media_root_pathlib(self):
with tempfile.TemporaryDirectory() as tmp_dir:
with override_settings(MEDIA_ROOT=Path(tmp_dir)):
with TemporaryUploadedFile(
"foo.txt", "text/plain", 1, "utf-8"
) as tmp_file:
document = Document.objects.create(myfile=tmp_file)
self.assertIs(
document.myfile.storage.exists(
os.path.join("unused", "foo.txt")
),
True,
)
def test_pickle(self):
with tempfile.TemporaryDirectory() as tmp_dir:
with override_settings(MEDIA_ROOT=Path(tmp_dir)):
with open(__file__, "rb") as fp:
file1 = File(fp, name="test_file.py")
document = Document(myfile="test_file.py")
document.myfile.save("test_file.py", file1)
try:
dump = pickle.dumps(document)
loaded_document = pickle.loads(dump)
self.assertEqual(document.myfile, loaded_document.myfile)
self.assertEqual(
document.myfile.url,
loaded_document.myfile.url,
)
self.assertEqual(
document.myfile.storage,
loaded_document.myfile.storage,
)
self.assertEqual(
document.myfile.instance,
loaded_document.myfile.instance,
)
self.assertEqual(
document.myfile.field,
loaded_document.myfile.field,
)
myfile_dump = pickle.dumps(document.myfile)
loaded_myfile = pickle.loads(myfile_dump)
self.assertEqual(document.myfile, loaded_myfile)
self.assertEqual(document.myfile.url, loaded_myfile.url)
self.assertEqual(
document.myfile.storage,
loaded_myfile.storage,
)
self.assertEqual(
document.myfile.instance,
loaded_myfile.instance,
)
self.assertEqual(document.myfile.field, loaded_myfile.field)
finally:
document.myfile.delete()
@isolate_apps("model_fields")
def test_abstract_filefield_model(self):
"""
FileField.model returns the concrete model for fields defined in an
abstract model.
"""
class AbstractMyDocument(models.Model):
myfile = models.FileField(upload_to="unused")
class Meta:
abstract = True
class MyDocument(AbstractMyDocument):
pass
document = MyDocument(myfile="test_file.py")
self.assertEqual(document.myfile.field.model, MyDocument)
|
c09cf21eed36b1d4edbd4f73a999e8d7ba6e519884289c06da650cb1e7551702 | from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import NotSupportedError, connection
from django.db.models import Prefetch, QuerySet, prefetch_related_objects
from django.db.models.query import get_prefetcher
from django.db.models.sql import Query
from django.test import (
TestCase,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext, ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
from .models import (
Article,
Author,
Author2,
AuthorAddress,
AuthorWithAge,
Bio,
Book,
Bookmark,
BookReview,
BookWithYear,
Comment,
Department,
Employee,
FavoriteAuthors,
House,
LessonEntry,
ModelIterableSubclass,
Person,
Qualification,
Reader,
Room,
TaggedItem,
Teacher,
WordEntry,
)
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.book1 = Book.objects.create(title="Poems")
cls.book2 = Book.objects.create(title="Jane Eyre")
cls.book3 = Book.objects.create(title="Wuthering Heights")
cls.book4 = Book.objects.create(title="Sense and Sensibility")
cls.author1 = Author.objects.create(name="Charlotte", first_book=cls.book1)
cls.author2 = Author.objects.create(name="Anne", first_book=cls.book1)
cls.author3 = Author.objects.create(name="Emily", first_book=cls.book1)
cls.author4 = Author.objects.create(name="Jane", first_book=cls.book4)
cls.book1.authors.add(cls.author1, cls.author2, cls.author3)
cls.book2.authors.add(cls.author1)
cls.book3.authors.add(cls.author3)
cls.book4.authors.add(cls.author4)
cls.reader1 = Reader.objects.create(name="Amy")
cls.reader2 = Reader.objects.create(name="Belinda")
cls.reader1.books_read.add(cls.book1, cls.book4)
cls.reader2.books_read.add(cls.book2, cls.book4)
class PrefetchRelatedTests(TestDataMixin, TestCase):
def assertWhereContains(self, sql, needle):
where_idx = sql.index("WHERE")
self.assertEqual(
sql.count(str(needle), where_idx),
1,
msg="WHERE clause doesn't contain %s, actual SQL: %s"
% (needle, sql[where_idx:]),
)
def test_m2m_forward(self):
with self.assertNumQueries(2):
lists = [
list(b.authors.all()) for b in Book.objects.prefetch_related("authors")
]
normal_lists = [list(b.authors.all()) for b in Book.objects.all()]
self.assertEqual(lists, normal_lists)
def test_m2m_reverse(self):
with self.assertNumQueries(2):
lists = [
list(a.books.all()) for a in Author.objects.prefetch_related("books")
]
normal_lists = [list(a.books.all()) for a in Author.objects.all()]
self.assertEqual(lists, normal_lists)
def test_foreignkey_forward(self):
with self.assertNumQueries(2):
books = [
a.first_book for a in Author.objects.prefetch_related("first_book")
]
normal_books = [a.first_book for a in Author.objects.all()]
self.assertEqual(books, normal_books)
def test_foreignkey_reverse(self):
with self.assertNumQueries(2):
[
list(b.first_time_authors.all())
for b in Book.objects.prefetch_related("first_time_authors")
]
self.assertSequenceEqual(self.book2.authors.all(), [self.author1])
def test_onetoone_reverse_no_match(self):
# Regression for #17439
with self.assertNumQueries(2):
book = Book.objects.prefetch_related("bookwithyear").all()[0]
with self.assertNumQueries(0):
with self.assertRaises(BookWithYear.DoesNotExist):
book.bookwithyear
def test_onetoone_reverse_with_to_field_pk(self):
"""
A model (Bio) with a OneToOneField primary key (author) that references
a non-pk field (name) on the related model (Author) is prefetchable.
"""
Bio.objects.bulk_create(
[
Bio(author=self.author1),
Bio(author=self.author2),
Bio(author=self.author3),
]
)
authors = Author.objects.filter(
name__in=[self.author1, self.author2, self.author3],
).prefetch_related("bio")
with self.assertNumQueries(2):
for author in authors:
self.assertEqual(author.name, author.bio.author.name)
def test_survives_clone(self):
with self.assertNumQueries(2):
[
list(b.first_time_authors.all())
for b in Book.objects.prefetch_related("first_time_authors").exclude(
id=1000
)
]
def test_len(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related("first_time_authors")
len(qs)
[list(b.first_time_authors.all()) for b in qs]
def test_bool(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related("first_time_authors")
bool(qs)
[list(b.first_time_authors.all()) for b in qs]
def test_count(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related("first_time_authors")
[b.first_time_authors.count() for b in qs]
def test_exists(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related("first_time_authors")
[b.first_time_authors.exists() for b in qs]
def test_in_and_prefetch_related(self):
"""
Regression test for #20242 - QuerySet "in" didn't work the first time
when using prefetch_related. This was fixed by the removal of chunked
reads from QuerySet iteration in
70679243d1786e03557c28929f9762a119e3ac14.
"""
qs = Book.objects.prefetch_related("first_time_authors")
self.assertIn(qs[0], qs)
def test_clear(self):
with self.assertNumQueries(5):
with_prefetch = Author.objects.prefetch_related("books")
without_prefetch = with_prefetch.prefetch_related(None)
[list(a.books.all()) for a in without_prefetch]
def test_m2m_then_m2m(self):
"""A m2m can be followed through another m2m."""
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related("books__read_by")
lists = [
[[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs
]
self.assertEqual(
lists,
[
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
],
)
def test_overriding_prefetch(self):
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related("books", "books__read_by")
lists = [
[[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs
]
self.assertEqual(
lists,
[
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
],
)
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related("books__read_by", "books")
lists = [
[[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs
]
self.assertEqual(
lists,
[
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
],
)
def test_get(self):
"""
Objects retrieved with .get() get the prefetch behavior.
"""
        # Need a double-level prefetch (books__read_by) here.
with self.assertNumQueries(3):
author = Author.objects.prefetch_related("books__read_by").get(
name="Charlotte"
)
lists = [[str(r) for r in b.read_by.all()] for b in author.books.all()]
self.assertEqual(lists, [["Amy"], ["Belinda"]]) # Poems, Jane Eyre
def test_foreign_key_then_m2m(self):
"""
A m2m relation can be followed after a relation like ForeignKey that
doesn't have many objects.
"""
with self.assertNumQueries(2):
qs = Author.objects.select_related("first_book").prefetch_related(
"first_book__read_by"
)
lists = [[str(r) for r in a.first_book.read_by.all()] for a in qs]
self.assertEqual(lists, [["Amy"], ["Amy"], ["Amy"], ["Amy", "Belinda"]])
def test_reverse_one_to_one_then_m2m(self):
"""
A m2m relation can be followed after going through the select_related
reverse of an o2o.
"""
qs = Author.objects.prefetch_related("bio__books").select_related("bio")
with self.assertNumQueries(1):
list(qs.all())
Bio.objects.create(author=self.author1)
with self.assertNumQueries(2):
list(qs.all())
def test_attribute_error(self):
qs = Reader.objects.prefetch_related("books_read__xyz")
msg = (
"Cannot find 'xyz' on Book object, 'books_read__xyz' "
"is an invalid parameter to prefetch_related()"
)
with self.assertRaisesMessage(AttributeError, msg) as cm:
list(qs)
self.assertIn("prefetch_related", str(cm.exception))
def test_invalid_final_lookup(self):
qs = Book.objects.prefetch_related("authors__name")
msg = (
"'authors__name' does not resolve to an item that supports "
"prefetching - this is an invalid parameter to prefetch_related()."
)
with self.assertRaisesMessage(ValueError, msg) as cm:
list(qs)
self.assertIn("prefetch_related", str(cm.exception))
self.assertIn("name", str(cm.exception))
def test_prefetch_eq(self):
prefetch_1 = Prefetch("authors", queryset=Author.objects.all())
prefetch_2 = Prefetch("books", queryset=Book.objects.all())
self.assertEqual(prefetch_1, prefetch_1)
self.assertEqual(prefetch_1, mock.ANY)
self.assertNotEqual(prefetch_1, prefetch_2)
def test_forward_m2m_to_attr_conflict(self):
msg = "to_attr=authors conflicts with a field on the Book model."
authors = Author.objects.all()
with self.assertRaisesMessage(ValueError, msg):
list(
Book.objects.prefetch_related(
Prefetch("authors", queryset=authors, to_attr="authors"),
)
)
# Without the ValueError, an author was deleted due to the implicit
# save of the relation assignment.
self.assertEqual(self.book1.authors.count(), 3)
def test_reverse_m2m_to_attr_conflict(self):
msg = "to_attr=books conflicts with a field on the Author model."
poems = Book.objects.filter(title="Poems")
with self.assertRaisesMessage(ValueError, msg):
list(
Author.objects.prefetch_related(
Prefetch("books", queryset=poems, to_attr="books"),
)
)
# Without the ValueError, a book was deleted due to the implicit
        # save of the reverse relation assignment.
self.assertEqual(self.author1.books.count(), 2)
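    # When to_attr doesn't clash with an existing field, the prefetched
    # objects are stored as a plain list on the named attribute. A sketch
    # using the models above (author_list is an arbitrary attribute name):
    #
    #   books = Book.objects.prefetch_related(
    #       Prefetch("authors", queryset=Author.objects.all(), to_attr="author_list")
    #   )
    #   books[0].author_list  # a list of Author instances, not a manager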
def test_m2m_then_reverse_fk_object_ids(self):
with CaptureQueriesContext(connection) as queries:
list(Book.objects.prefetch_related("authors__addresses"))
sql = queries[-1]["sql"]
self.assertWhereContains(sql, self.author1.name)
def test_m2m_then_m2m_object_ids(self):
with CaptureQueriesContext(connection) as queries:
list(Book.objects.prefetch_related("authors__favorite_authors"))
sql = queries[-1]["sql"]
self.assertWhereContains(sql, self.author1.name)
def test_m2m_then_reverse_one_to_one_object_ids(self):
with CaptureQueriesContext(connection) as queries:
list(Book.objects.prefetch_related("authors__authorwithage"))
sql = queries[-1]["sql"]
self.assertWhereContains(sql, self.author1.id)
def test_filter_deferred(self):
"""
Related filtering of prefetched querysets is deferred on m2m and
reverse m2o relations until necessary.
"""
add_q = Query.add_q
for relation in ["authors", "first_time_authors"]:
with self.subTest(relation=relation):
with mock.patch.object(
Query,
"add_q",
autospec=True,
side_effect=lambda self, q: add_q(self, q),
) as add_q_mock:
list(Book.objects.prefetch_related(relation))
self.assertEqual(add_q_mock.call_count, 1)
def test_named_values_list(self):
qs = Author.objects.prefetch_related("books")
self.assertCountEqual(
[value.name for value in qs.values_list("name", named=True)],
["Anne", "Charlotte", "Emily", "Jane"],
)
def test_m2m_prefetching_iterator_with_chunks(self):
with self.assertNumQueries(3):
authors = [
b.authors.first()
for b in Book.objects.prefetch_related("authors").iterator(chunk_size=2)
]
self.assertEqual(
authors,
[self.author1, self.author1, self.author3, self.author4],
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_m2m_prefetching_iterator_without_chunks(self):
# prefetch_related() is ignored.
with self.assertNumQueries(5):
authors = [
b.authors.first()
for b in Book.objects.prefetch_related("authors").iterator()
]
self.assertEqual(
authors,
[self.author1, self.author1, self.author3, self.author4],
)
def test_m2m_prefetching_iterator_without_chunks_warning(self):
msg = (
"Using QuerySet.iterator() after prefetch_related() without "
"specifying chunk_size is deprecated."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Book.objects.prefetch_related("authors").iterator()
class RawQuerySetTests(TestDataMixin, TestCase):
def test_basic(self):
with self.assertNumQueries(2):
books = Book.objects.raw(
"SELECT * FROM prefetch_related_book WHERE id = %s", (self.book1.id,)
).prefetch_related("authors")
book1 = list(books)[0]
with self.assertNumQueries(0):
self.assertCountEqual(
book1.authors.all(), [self.author1, self.author2, self.author3]
)
def test_prefetch_before_raw(self):
with self.assertNumQueries(2):
books = Book.objects.prefetch_related("authors").raw(
"SELECT * FROM prefetch_related_book WHERE id = %s", (self.book1.id,)
)
book1 = list(books)[0]
with self.assertNumQueries(0):
self.assertCountEqual(
book1.authors.all(), [self.author1, self.author2, self.author3]
)
def test_clear(self):
with self.assertNumQueries(5):
with_prefetch = Author.objects.raw(
"SELECT * FROM prefetch_related_author"
).prefetch_related("books")
without_prefetch = with_prefetch.prefetch_related(None)
[list(a.books.all()) for a in without_prefetch]
class CustomPrefetchTests(TestCase):
@classmethod
def traverse_qs(cls, obj_iter, path):
"""
        Helper method that returns a list of (object, related objects) pairs
        for the objects in obj_iter. For each object, the given path is
        recursively traversed and the objects found along the way are added
        to the return value.
"""
ret_val = []
if hasattr(obj_iter, "all"):
obj_iter = obj_iter.all()
try:
iter(obj_iter)
except TypeError:
obj_iter = [obj_iter]
for obj in obj_iter:
rel_objs = []
for part in path:
if not part:
continue
try:
related = getattr(obj, part[0])
except ObjectDoesNotExist:
continue
if related is not None:
rel_objs.extend(cls.traverse_qs(related, [part[1:]]))
ret_val.append((obj, rel_objs))
return ret_val
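    # For example, traverse_qs(qs, [["houses", "rooms"]]) returns a structure
    # like [(person, [(house, [(room, []), ...]), ...]), ...]: each object is
    # paired with the related objects found along the given path.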
@classmethod
def setUpTestData(cls):
cls.person1 = Person.objects.create(name="Joe")
cls.person2 = Person.objects.create(name="Mary")
# Set main_room for each house before creating the next one for
# databases where supports_nullable_unique_constraints is False.
cls.house1 = House.objects.create(
name="House 1", address="123 Main St", owner=cls.person1
)
cls.room1_1 = Room.objects.create(name="Dining room", house=cls.house1)
cls.room1_2 = Room.objects.create(name="Lounge", house=cls.house1)
cls.room1_3 = Room.objects.create(name="Kitchen", house=cls.house1)
cls.house1.main_room = cls.room1_1
cls.house1.save()
cls.person1.houses.add(cls.house1)
cls.house2 = House.objects.create(
name="House 2", address="45 Side St", owner=cls.person1
)
cls.room2_1 = Room.objects.create(name="Dining room", house=cls.house2)
cls.room2_2 = Room.objects.create(name="Lounge", house=cls.house2)
cls.room2_3 = Room.objects.create(name="Kitchen", house=cls.house2)
cls.house2.main_room = cls.room2_1
cls.house2.save()
cls.person1.houses.add(cls.house2)
cls.house3 = House.objects.create(
name="House 3", address="6 Downing St", owner=cls.person2
)
cls.room3_1 = Room.objects.create(name="Dining room", house=cls.house3)
cls.room3_2 = Room.objects.create(name="Lounge", house=cls.house3)
cls.room3_3 = Room.objects.create(name="Kitchen", house=cls.house3)
cls.house3.main_room = cls.room3_1
cls.house3.save()
cls.person2.houses.add(cls.house3)
cls.house4 = House.objects.create(
name="house 4", address="7 Regents St", owner=cls.person2
)
cls.room4_1 = Room.objects.create(name="Dining room", house=cls.house4)
cls.room4_2 = Room.objects.create(name="Lounge", house=cls.house4)
cls.room4_3 = Room.objects.create(name="Kitchen", house=cls.house4)
cls.house4.main_room = cls.room4_1
cls.house4.save()
cls.person2.houses.add(cls.house4)
def test_traverse_qs(self):
qs = Person.objects.prefetch_related("houses")
related_objs_normal = ([list(p.houses.all()) for p in qs],)
related_objs_from_traverse = [
[inner[0] for inner in o[1]] for o in self.traverse_qs(qs, [["houses"]])
]
self.assertEqual(related_objs_normal, (related_objs_from_traverse,))
def test_ambiguous(self):
# Ambiguous: Lookup was already seen with a different queryset.
msg = (
"'houses' lookup was already seen with a different queryset. You "
"may need to adjust the ordering of your lookups."
)
# lookup.queryset shouldn't be evaluated.
with self.assertNumQueries(3):
with self.assertRaisesMessage(ValueError, msg):
self.traverse_qs(
Person.objects.prefetch_related(
"houses__rooms",
Prefetch("houses", queryset=House.objects.all()),
),
[["houses", "rooms"]],
)
# Ambiguous: Lookup houses_lst doesn't yet exist when performing
# houses_lst__rooms.
msg = (
"Cannot find 'houses_lst' on Person object, 'houses_lst__rooms' is "
"an invalid parameter to prefetch_related()"
)
with self.assertRaisesMessage(AttributeError, msg):
self.traverse_qs(
Person.objects.prefetch_related(
"houses_lst__rooms",
Prefetch(
"houses", queryset=House.objects.all(), to_attr="houses_lst"
),
),
[["houses", "rooms"]],
)
# Not ambiguous.
self.traverse_qs(
Person.objects.prefetch_related("houses__rooms", "houses"),
[["houses", "rooms"]],
)
self.traverse_qs(
Person.objects.prefetch_related(
"houses__rooms",
Prefetch("houses", queryset=House.objects.all(), to_attr="houses_lst"),
),
[["houses", "rooms"]],
)
def test_m2m(self):
# Control lookups.
with self.assertNumQueries(2):
lst1 = self.traverse_qs(
Person.objects.prefetch_related("houses"), [["houses"]]
)
# Test lookups.
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch("houses")), [["houses"]]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
Prefetch("houses", to_attr="houses_lst")
),
[["houses_lst"]],
)
self.assertEqual(lst1, lst2)
def test_reverse_m2m(self):
# Control lookups.
with self.assertNumQueries(2):
lst1 = self.traverse_qs(
House.objects.prefetch_related("occupants"), [["occupants"]]
)
# Test lookups.
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
House.objects.prefetch_related(Prefetch("occupants")), [["occupants"]]
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
lst2 = self.traverse_qs(
House.objects.prefetch_related(
Prefetch("occupants", to_attr="occupants_lst")
),
[["occupants_lst"]],
)
self.assertEqual(lst1, lst2)
def test_m2m_through_fk(self):
# Control lookups.
with self.assertNumQueries(3):
lst1 = self.traverse_qs(
Room.objects.prefetch_related("house__occupants"),
[["house", "occupants"]],
)
# Test lookups.
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Room.objects.prefetch_related(Prefetch("house__occupants")),
[["house", "occupants"]],
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Room.objects.prefetch_related(
Prefetch("house__occupants", to_attr="occupants_lst")
),
[["house", "occupants_lst"]],
)
self.assertEqual(lst1, lst2)
def test_m2m_through_gfk(self):
TaggedItem.objects.create(tag="houses", content_object=self.house1)
TaggedItem.objects.create(tag="houses", content_object=self.house2)
# Control lookups.
with self.assertNumQueries(3):
lst1 = self.traverse_qs(
TaggedItem.objects.filter(tag="houses").prefetch_related(
"content_object__rooms"
),
[["content_object", "rooms"]],
)
# Test lookups.
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
TaggedItem.objects.prefetch_related(
Prefetch("content_object"),
Prefetch("content_object__rooms", to_attr="rooms_lst"),
),
[["content_object", "rooms_lst"]],
)
self.assertEqual(lst1, lst2)
def test_o2m_through_m2m(self):
# Control lookups.
with self.assertNumQueries(3):
lst1 = self.traverse_qs(
Person.objects.prefetch_related("houses", "houses__rooms"),
[["houses", "rooms"]],
)
# Test lookups.
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(Prefetch("houses"), "houses__rooms"),
[["houses", "rooms"]],
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
Prefetch("houses"), Prefetch("houses__rooms")
),
[["houses", "rooms"]],
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
Prefetch("houses", to_attr="houses_lst"), "houses_lst__rooms"
),
[["houses_lst", "rooms"]],
)
self.assertEqual(lst1, lst2)
with self.assertNumQueries(3):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
Prefetch("houses", to_attr="houses_lst"),
Prefetch("houses_lst__rooms", to_attr="rooms_lst"),
),
[["houses_lst", "rooms_lst"]],
)
self.assertEqual(lst1, lst2)
def test_generic_rel(self):
bookmark = Bookmark.objects.create(url="http://www.djangoproject.com/")
TaggedItem.objects.create(content_object=bookmark, tag="django")
TaggedItem.objects.create(
content_object=bookmark, favorite=bookmark, tag="python"
)
# Control lookups.
with self.assertNumQueries(4):
lst1 = self.traverse_qs(
Bookmark.objects.prefetch_related(
"tags", "tags__content_object", "favorite_tags"
),
[["tags", "content_object"], ["favorite_tags"]],
)
# Test lookups.
with self.assertNumQueries(4):
lst2 = self.traverse_qs(
Bookmark.objects.prefetch_related(
Prefetch("tags", to_attr="tags_lst"),
Prefetch("tags_lst__content_object"),
Prefetch("favorite_tags"),
),
[["tags_lst", "content_object"], ["favorite_tags"]],
)
self.assertEqual(lst1, lst2)
def test_traverse_single_item_property(self):
# Control lookups.
with self.assertNumQueries(5):
lst1 = self.traverse_qs(
Person.objects.prefetch_related(
"houses__rooms",
"primary_house__occupants__houses",
),
[["primary_house", "occupants", "houses"]],
)
# Test lookups.
with self.assertNumQueries(5):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
"houses__rooms",
Prefetch("primary_house__occupants", to_attr="occupants_lst"),
"primary_house__occupants_lst__houses",
),
[["primary_house", "occupants_lst", "houses"]],
)
self.assertEqual(lst1, lst2)
def test_traverse_multiple_items_property(self):
# Control lookups.
with self.assertNumQueries(4):
lst1 = self.traverse_qs(
Person.objects.prefetch_related(
"houses",
"all_houses__occupants__houses",
),
[["all_houses", "occupants", "houses"]],
)
# Test lookups.
with self.assertNumQueries(4):
lst2 = self.traverse_qs(
Person.objects.prefetch_related(
"houses",
Prefetch("all_houses__occupants", to_attr="occupants_lst"),
"all_houses__occupants_lst__houses",
),
[["all_houses", "occupants_lst", "houses"]],
)
self.assertEqual(lst1, lst2)
def test_custom_qs(self):
# Test basic.
with self.assertNumQueries(2):
lst1 = list(Person.objects.prefetch_related("houses"))
with self.assertNumQueries(2):
lst2 = list(
Person.objects.prefetch_related(
Prefetch(
"houses", queryset=House.objects.all(), to_attr="houses_lst"
)
)
)
self.assertEqual(
self.traverse_qs(lst1, [["houses"]]),
self.traverse_qs(lst2, [["houses_lst"]]),
)
# Test queryset filtering.
with self.assertNumQueries(2):
lst2 = list(
Person.objects.prefetch_related(
Prefetch(
"houses",
queryset=House.objects.filter(
pk__in=[self.house1.pk, self.house3.pk]
),
to_attr="houses_lst",
)
)
)
self.assertEqual(len(lst2[0].houses_lst), 1)
self.assertEqual(lst2[0].houses_lst[0], self.house1)
self.assertEqual(len(lst2[1].houses_lst), 1)
self.assertEqual(lst2[1].houses_lst[0], self.house3)
# Test flattened.
with self.assertNumQueries(3):
lst1 = list(Person.objects.prefetch_related("houses__rooms"))
with self.assertNumQueries(3):
lst2 = list(
Person.objects.prefetch_related(
Prefetch(
"houses__rooms",
queryset=Room.objects.all(),
to_attr="rooms_lst",
)
)
)
self.assertEqual(
self.traverse_qs(lst1, [["houses", "rooms"]]),
self.traverse_qs(lst2, [["houses", "rooms_lst"]]),
)
# Test inner select_related.
with self.assertNumQueries(3):
lst1 = list(Person.objects.prefetch_related("houses__owner"))
with self.assertNumQueries(2):
lst2 = list(
Person.objects.prefetch_related(
Prefetch("houses", queryset=House.objects.select_related("owner"))
)
)
self.assertEqual(
self.traverse_qs(lst1, [["houses", "owner"]]),
self.traverse_qs(lst2, [["houses", "owner"]]),
)
# Test inner prefetch.
inner_rooms_qs = Room.objects.filter(pk__in=[self.room1_1.pk, self.room1_2.pk])
houses_qs_prf = House.objects.prefetch_related(
Prefetch("rooms", queryset=inner_rooms_qs, to_attr="rooms_lst")
)
with self.assertNumQueries(4):
lst2 = list(
Person.objects.prefetch_related(
Prefetch(
"houses",
queryset=houses_qs_prf.filter(pk=self.house1.pk),
to_attr="houses_lst",
),
Prefetch("houses_lst__rooms_lst__main_room_of"),
)
)
self.assertEqual(len(lst2[0].houses_lst[0].rooms_lst), 2)
self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0], self.room1_1)
self.assertEqual(lst2[0].houses_lst[0].rooms_lst[1], self.room1_2)
self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0].main_room_of, self.house1)
self.assertEqual(len(lst2[1].houses_lst), 0)
# Test ForwardManyToOneDescriptor.
houses = House.objects.select_related("owner")
with self.assertNumQueries(6):
rooms = Room.objects.prefetch_related("house")
lst1 = self.traverse_qs(rooms, [["house", "owner"]])
with self.assertNumQueries(2):
rooms = Room.objects.prefetch_related(Prefetch("house", queryset=houses))
lst2 = self.traverse_qs(rooms, [["house", "owner"]])
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
houses = House.objects.select_related("owner")
rooms = Room.objects.prefetch_related(
Prefetch("house", queryset=houses, to_attr="house_attr")
)
lst2 = self.traverse_qs(rooms, [["house_attr", "owner"]])
self.assertEqual(lst1, lst2)
room = Room.objects.prefetch_related(
Prefetch("house", queryset=houses.filter(address="DoesNotExist"))
).first()
with self.assertRaises(ObjectDoesNotExist):
getattr(room, "house")
room = Room.objects.prefetch_related(
Prefetch(
"house",
queryset=houses.filter(address="DoesNotExist"),
to_attr="house_attr",
)
).first()
self.assertIsNone(room.house_attr)
rooms = Room.objects.prefetch_related(
Prefetch("house", queryset=House.objects.only("name"))
)
with self.assertNumQueries(2):
getattr(rooms.first().house, "name")
with self.assertNumQueries(3):
getattr(rooms.first().house, "address")
# Test ReverseOneToOneDescriptor.
houses = House.objects.select_related("owner")
with self.assertNumQueries(6):
rooms = Room.objects.prefetch_related("main_room_of")
lst1 = self.traverse_qs(rooms, [["main_room_of", "owner"]])
with self.assertNumQueries(2):
rooms = Room.objects.prefetch_related(
Prefetch("main_room_of", queryset=houses)
)
lst2 = self.traverse_qs(rooms, [["main_room_of", "owner"]])
self.assertEqual(lst1, lst2)
with self.assertNumQueries(2):
rooms = list(
Room.objects.prefetch_related(
Prefetch(
"main_room_of",
queryset=houses,
to_attr="main_room_of_attr",
)
)
)
lst2 = self.traverse_qs(rooms, [["main_room_of_attr", "owner"]])
self.assertEqual(lst1, lst2)
room = (
Room.objects.filter(main_room_of__isnull=False)
.prefetch_related(
Prefetch("main_room_of", queryset=houses.filter(address="DoesNotExist"))
)
.first()
)
with self.assertRaises(ObjectDoesNotExist):
getattr(room, "main_room_of")
room = (
Room.objects.filter(main_room_of__isnull=False)
.prefetch_related(
Prefetch(
"main_room_of",
queryset=houses.filter(address="DoesNotExist"),
to_attr="main_room_of_attr",
)
)
.first()
)
self.assertIsNone(room.main_room_of_attr)
# The custom queryset filters should be applied to the queryset
# instance returned by the manager.
person = Person.objects.prefetch_related(
Prefetch("houses", queryset=House.objects.filter(name="House 1")),
).get(pk=self.person1.pk)
self.assertEqual(
list(person.houses.all()),
list(person.houses.all().all()),
)
def test_nested_prefetch_related_are_not_overwritten(self):
# Regression test for #24873
houses_2 = House.objects.prefetch_related(Prefetch("rooms"))
        persons = Person.objects.prefetch_related(
            Prefetch("houses", queryset=houses_2)
        )
        houses = House.objects.prefetch_related(
            Prefetch("occupants", queryset=persons)
        )
list(houses) # queryset must be evaluated once to reproduce the bug.
self.assertEqual(
houses.all()[0].occupants.all()[0].houses.all()[1].rooms.all()[0],
self.room2_1,
)
def test_nested_prefetch_related_with_duplicate_prefetcher(self):
"""
Nested prefetches whose name clashes with descriptor names
(Person.houses here) are allowed.
"""
occupants = Person.objects.prefetch_related(
Prefetch("houses", to_attr="some_attr_name"),
Prefetch("houses", queryset=House.objects.prefetch_related("main_room")),
)
houses = House.objects.prefetch_related(
Prefetch("occupants", queryset=occupants)
)
with self.assertNumQueries(5):
self.traverse_qs(list(houses), [["occupants", "houses", "main_room"]])
def test_values_queryset(self):
msg = "Prefetch querysets cannot use raw(), values(), and values_list()."
with self.assertRaisesMessage(ValueError, msg):
Prefetch("houses", House.objects.values("pk"))
with self.assertRaisesMessage(ValueError, msg):
Prefetch("houses", House.objects.values_list("pk"))
        # That error doesn't affect managers with custom ModelIterable subclasses.
self.assertIs(
Teacher.objects_custom.all()._iterable_class, ModelIterableSubclass
)
Prefetch("teachers", Teacher.objects_custom.all())
def test_raw_queryset(self):
msg = "Prefetch querysets cannot use raw(), values(), and values_list()."
with self.assertRaisesMessage(ValueError, msg):
Prefetch("houses", House.objects.raw("select pk from house"))
def test_to_attr_doesnt_cache_through_attr_as_list(self):
house = House.objects.prefetch_related(
Prefetch("rooms", queryset=Room.objects.all(), to_attr="to_rooms"),
).get(pk=self.house3.pk)
self.assertIsInstance(house.rooms.all(), QuerySet)
def test_to_attr_cached_property(self):
persons = Person.objects.prefetch_related(
Prefetch("houses", House.objects.all(), to_attr="cached_all_houses"),
)
for person in persons:
# To bypass caching at the related descriptor level, don't use
# person.houses.all() here.
all_houses = list(House.objects.filter(occupants=person))
with self.assertNumQueries(0):
self.assertEqual(person.cached_all_houses, all_houses)
def test_filter_deferred(self):
"""
Related filtering of prefetched querysets is deferred until necessary.
"""
add_q = Query.add_q
with mock.patch.object(
Query,
"add_q",
autospec=True,
side_effect=lambda self, q: add_q(self, q),
) as add_q_mock:
list(
House.objects.prefetch_related(
Prefetch("occupants", queryset=Person.objects.all())
)
)
self.assertEqual(add_q_mock.call_count, 1)
class DefaultManagerTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.qual1 = Qualification.objects.create(name="BA")
cls.qual2 = Qualification.objects.create(name="BSci")
cls.qual3 = Qualification.objects.create(name="MA")
cls.qual4 = Qualification.objects.create(name="PhD")
cls.teacher1 = Teacher.objects.create(name="Mr Cleese")
cls.teacher2 = Teacher.objects.create(name="Mr Idle")
cls.teacher3 = Teacher.objects.create(name="Mr Chapman")
cls.teacher1.qualifications.add(cls.qual1, cls.qual2, cls.qual3, cls.qual4)
cls.teacher2.qualifications.add(cls.qual1)
cls.teacher3.qualifications.add(cls.qual2)
cls.dept1 = Department.objects.create(name="English")
cls.dept2 = Department.objects.create(name="Physics")
cls.dept1.teachers.add(cls.teacher1, cls.teacher2)
cls.dept2.teachers.add(cls.teacher1, cls.teacher3)
def test_m2m_then_m2m(self):
with self.assertNumQueries(3):
# When we prefetch the teachers, and force the query, we don't want
# the default manager on teachers to immediately get all the related
# qualifications, since this will do one query per teacher.
qs = Department.objects.prefetch_related("teachers")
depts = "".join(
"%s department: %s\n"
% (dept.name, ", ".join(str(t) for t in dept.teachers.all()))
for dept in qs
)
self.assertEqual(
depts,
"English department: Mr Cleese (BA, BSci, MA, PhD), Mr Idle (BA)\n"
"Physics department: Mr Cleese (BA, BSci, MA, PhD), Mr Chapman "
"(BSci)\n",
)
class GenericRelationTests(TestCase):
@classmethod
def setUpTestData(cls):
book1 = Book.objects.create(title="Winnie the Pooh")
book2 = Book.objects.create(title="Do you like green eggs and spam?")
book3 = Book.objects.create(title="Three Men In A Boat")
reader1 = Reader.objects.create(name="me")
reader2 = Reader.objects.create(name="you")
reader3 = Reader.objects.create(name="someone")
book1.read_by.add(reader1, reader2)
book2.read_by.add(reader2)
book3.read_by.add(reader3)
cls.book1, cls.book2, cls.book3 = book1, book2, book3
cls.reader1, cls.reader2, cls.reader3 = reader1, reader2, reader3
def test_prefetch_GFK(self):
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="great", content_object=self.reader1)
TaggedItem.objects.create(tag="outstanding", content_object=self.book2)
TaggedItem.objects.create(tag="amazing", content_object=self.reader3)
# 1 for TaggedItem table, 1 for Book table, 1 for Reader table
with self.assertNumQueries(3):
qs = TaggedItem.objects.prefetch_related("content_object")
list(qs)
def test_prefetch_GFK_nonint_pk(self):
Comment.objects.create(comment="awesome", content_object=self.book1)
# 1 for Comment table, 1 for Book table
with self.assertNumQueries(2):
qs = Comment.objects.prefetch_related("content_object")
[c.content_object for c in qs]
def test_prefetch_GFK_uuid_pk(self):
article = Article.objects.create(name="Django")
Comment.objects.create(comment="awesome", content_object_uuid=article)
qs = Comment.objects.prefetch_related("content_object_uuid")
self.assertEqual([c.content_object_uuid for c in qs], [article])
def test_prefetch_GFK_fk_pk(self):
book = Book.objects.create(title="Poems")
book_with_year = BookWithYear.objects.create(book=book, published_year=2019)
Comment.objects.create(comment="awesome", content_object=book_with_year)
qs = Comment.objects.prefetch_related("content_object")
self.assertEqual([c.content_object for c in qs], [book_with_year])
def test_traverse_GFK(self):
"""
        A 'content_object' can be traversed with prefetch_related() to reach
        related objects on the other side (assuming it is suitably filtered).
"""
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="awesome", content_object=self.book2)
TaggedItem.objects.create(tag="awesome", content_object=self.book3)
TaggedItem.objects.create(tag="awesome", content_object=self.reader1)
TaggedItem.objects.create(tag="awesome", content_object=self.reader2)
ct = ContentType.objects.get_for_model(Book)
# We get 3 queries - 1 for main query, 1 for content_objects since they
# all use the same table, and 1 for the 'read_by' relation.
with self.assertNumQueries(3):
# If we limit to books, we know that they will have 'read_by'
# attributes, so the following makes sense:
qs = TaggedItem.objects.filter(
content_type=ct, tag="awesome"
).prefetch_related("content_object__read_by")
readers_of_awesome_books = {
r.name for tag in qs for r in tag.content_object.read_by.all()
}
self.assertEqual(readers_of_awesome_books, {"me", "you", "someone"})
def test_nullable_GFK(self):
TaggedItem.objects.create(
tag="awesome", content_object=self.book1, created_by=self.reader1
)
TaggedItem.objects.create(tag="great", content_object=self.book2)
TaggedItem.objects.create(tag="rubbish", content_object=self.book3)
with self.assertNumQueries(2):
result = [
t.created_by for t in TaggedItem.objects.prefetch_related("created_by")
]
self.assertEqual(result, [t.created_by for t in TaggedItem.objects.all()])
def test_generic_relation(self):
bookmark = Bookmark.objects.create(url="http://www.djangoproject.com/")
TaggedItem.objects.create(content_object=bookmark, tag="django")
TaggedItem.objects.create(content_object=bookmark, tag="python")
with self.assertNumQueries(2):
tags = [
t.tag
for b in Bookmark.objects.prefetch_related("tags")
for t in b.tags.all()
]
self.assertEqual(sorted(tags), ["django", "python"])
def test_charfield_GFK(self):
b = Bookmark.objects.create(url="http://www.djangoproject.com/")
TaggedItem.objects.create(content_object=b, tag="django")
TaggedItem.objects.create(content_object=b, favorite=b, tag="python")
with self.assertNumQueries(3):
bookmark = Bookmark.objects.filter(pk=b.pk).prefetch_related(
"tags", "favorite_tags"
)[0]
self.assertEqual(
sorted(i.tag for i in bookmark.tags.all()), ["django", "python"]
)
self.assertEqual([i.tag for i in bookmark.favorite_tags.all()], ["python"])
def test_custom_queryset(self):
bookmark = Bookmark.objects.create(url="http://www.djangoproject.com/")
django_tag = TaggedItem.objects.create(content_object=bookmark, tag="django")
TaggedItem.objects.create(content_object=bookmark, tag="python")
with self.assertNumQueries(2):
bookmark = Bookmark.objects.prefetch_related(
Prefetch("tags", TaggedItem.objects.filter(tag="django")),
).get()
with self.assertNumQueries(0):
self.assertEqual(list(bookmark.tags.all()), [django_tag])
# The custom queryset filters should be applied to the queryset
# instance returned by the manager.
self.assertEqual(list(bookmark.tags.all()), list(bookmark.tags.all().all()))
def test_deleted_GFK(self):
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="awesome", content_object=self.book2)
ct = ContentType.objects.get_for_model(Book)
book1_pk = self.book1.pk
self.book1.delete()
with self.assertNumQueries(2):
qs = TaggedItem.objects.filter(tag="awesome").prefetch_related(
"content_object"
)
result = [
(tag.object_id, tag.content_type_id, tag.content_object) for tag in qs
]
self.assertEqual(
result,
[
(book1_pk, ct.pk, None),
(self.book2.pk, ct.pk, self.book2),
],
)
class MultiTableInheritanceTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.book1 = BookWithYear.objects.create(title="Poems", published_year=2010)
cls.book2 = BookWithYear.objects.create(title="More poems", published_year=2011)
cls.author1 = AuthorWithAge.objects.create(
name="Jane", first_book=cls.book1, age=50
)
cls.author2 = AuthorWithAge.objects.create(
name="Tom", first_book=cls.book1, age=49
)
cls.author3 = AuthorWithAge.objects.create(
name="Robert", first_book=cls.book2, age=48
)
cls.author_address = AuthorAddress.objects.create(
author=cls.author1, address="SomeStreet 1"
)
cls.book2.aged_authors.add(cls.author2, cls.author3)
cls.br1 = BookReview.objects.create(book=cls.book1, notes="review book1")
cls.br2 = BookReview.objects.create(book=cls.book2, notes="review book2")
def test_foreignkey(self):
with self.assertNumQueries(2):
qs = AuthorWithAge.objects.prefetch_related("addresses")
addresses = [
[str(address) for address in obj.addresses.all()] for obj in qs
]
self.assertEqual(addresses, [[str(self.author_address)], [], []])
def test_foreignkey_to_inherited(self):
with self.assertNumQueries(2):
qs = BookReview.objects.prefetch_related("book")
titles = [obj.book.title for obj in qs]
self.assertCountEqual(titles, ["Poems", "More poems"])
def test_m2m_to_inheriting_model(self):
qs = AuthorWithAge.objects.prefetch_related("books_with_year")
with self.assertNumQueries(2):
lst = [
[str(book) for book in author.books_with_year.all()] for author in qs
]
qs = AuthorWithAge.objects.all()
lst2 = [[str(book) for book in author.books_with_year.all()] for author in qs]
self.assertEqual(lst, lst2)
qs = BookWithYear.objects.prefetch_related("aged_authors")
with self.assertNumQueries(2):
lst = [[str(author) for author in book.aged_authors.all()] for book in qs]
qs = BookWithYear.objects.all()
lst2 = [[str(author) for author in book.aged_authors.all()] for book in qs]
self.assertEqual(lst, lst2)
def test_parent_link_prefetch(self):
with self.assertNumQueries(2):
[a.author for a in AuthorWithAge.objects.prefetch_related("author")]
@override_settings(DEBUG=True)
def test_child_link_prefetch(self):
with self.assertNumQueries(2):
authors = [
a.authorwithage
for a in Author.objects.prefetch_related("authorwithage")
]
# Regression for #18090: the prefetching query must include an IN clause.
# Note that on Oracle the table name is upper case in the generated SQL,
# thus the .lower() call.
self.assertIn("authorwithage", connection.queries[-1]["sql"].lower())
self.assertIn(" IN ", connection.queries[-1]["sql"])
self.assertEqual(authors, [a.authorwithage for a in Author.objects.all()])
class ForeignKeyToFieldTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.book = Book.objects.create(title="Poems")
cls.author1 = Author.objects.create(name="Jane", first_book=cls.book)
cls.author2 = Author.objects.create(name="Tom", first_book=cls.book)
cls.author3 = Author.objects.create(name="Robert", first_book=cls.book)
cls.author_address = AuthorAddress.objects.create(
author=cls.author1, address="SomeStreet 1"
)
FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2)
FavoriteAuthors.objects.create(author=cls.author2, likes_author=cls.author3)
FavoriteAuthors.objects.create(author=cls.author3, likes_author=cls.author1)
def test_foreignkey(self):
with self.assertNumQueries(2):
qs = Author.objects.prefetch_related("addresses")
addresses = [
[str(address) for address in obj.addresses.all()] for obj in qs
]
self.assertEqual(addresses, [[str(self.author_address)], [], []])
def test_m2m(self):
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related("favorite_authors", "favors_me")
favorites = [
(
[str(i_like) for i_like in author.favorite_authors.all()],
[str(likes_me) for likes_me in author.favors_me.all()],
)
for author in qs
]
self.assertEqual(
favorites,
[
([str(self.author2)], [str(self.author3)]),
([str(self.author3)], [str(self.author1)]),
([str(self.author1)], [str(self.author2)]),
],
)
class LookupOrderingTest(TestCase):
"""
Test cases that demonstrate that ordering of lookups is important, and
ensure it is preserved.
"""
@classmethod
def setUpTestData(cls):
person1 = Person.objects.create(name="Joe")
person2 = Person.objects.create(name="Mary")
# Set main_room for each house before creating the next one for
# databases where supports_nullable_unique_constraints is False.
house1 = House.objects.create(address="123 Main St")
room1_1 = Room.objects.create(name="Dining room", house=house1)
Room.objects.create(name="Lounge", house=house1)
Room.objects.create(name="Kitchen", house=house1)
house1.main_room = room1_1
house1.save()
person1.houses.add(house1)
house2 = House.objects.create(address="45 Side St")
room2_1 = Room.objects.create(name="Dining room", house=house2)
Room.objects.create(name="Lounge", house=house2)
house2.main_room = room2_1
house2.save()
person1.houses.add(house2)
house3 = House.objects.create(address="6 Downing St")
room3_1 = Room.objects.create(name="Dining room", house=house3)
Room.objects.create(name="Lounge", house=house3)
Room.objects.create(name="Kitchen", house=house3)
house3.main_room = room3_1
house3.save()
person2.houses.add(house3)
house4 = House.objects.create(address="7 Regents St")
room4_1 = Room.objects.create(name="Dining room", house=house4)
Room.objects.create(name="Lounge", house=house4)
house4.main_room = room4_1
house4.save()
person2.houses.add(house4)
def test_order(self):
with self.assertNumQueries(4):
            # The following two lookups must be done in the same order as
            # written, otherwise 'primary_house' will cause non-prefetched
            # lookups.
qs = Person.objects.prefetch_related(
"houses__rooms", "primary_house__occupants"
)
[list(p.primary_house.occupants.all()) for p in qs]
class NullableTest(TestCase):
@classmethod
def setUpTestData(cls):
boss = Employee.objects.create(name="Peter")
Employee.objects.create(name="Joe", boss=boss)
Employee.objects.create(name="Angela", boss=boss)
def test_traverse_nullable(self):
        # Because select_related() is used for 'boss', it doesn't need to be
        # prefetched, but it can still be traversed even though it contains
        # some nulls.
with self.assertNumQueries(2):
qs = Employee.objects.select_related("boss").prefetch_related("boss__serfs")
co_serfs = [
list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs
]
qs2 = Employee.objects.select_related("boss")
co_serfs2 = [
list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2
]
self.assertEqual(co_serfs, co_serfs2)
def test_prefetch_nullable(self):
# One for main employee, one for boss, one for serfs
with self.assertNumQueries(3):
qs = Employee.objects.prefetch_related("boss__serfs")
co_serfs = [
list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs
]
qs2 = Employee.objects.all()
co_serfs2 = [
list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2
]
self.assertEqual(co_serfs, co_serfs2)
def test_in_bulk(self):
"""
        in_bulk() correctly prefetches objects because it doesn't use
        .iterator() directly.
"""
boss1 = Employee.objects.create(name="Peter")
boss2 = Employee.objects.create(name="Jack")
with self.assertNumQueries(2):
# Prefetch is done and it does not cause any errors.
bulk = Employee.objects.prefetch_related("serfs").in_bulk(
[boss1.pk, boss2.pk]
)
for b in bulk.values():
list(b.serfs.all())
class MultiDbTests(TestCase):
databases = {"default", "other"}
def test_using_is_honored_m2m(self):
B = Book.objects.using("other")
A = Author.objects.using("other")
book1 = B.create(title="Poems")
book2 = B.create(title="Jane Eyre")
book3 = B.create(title="Wuthering Heights")
book4 = B.create(title="Sense and Sensibility")
author1 = A.create(name="Charlotte", first_book=book1)
author2 = A.create(name="Anne", first_book=book1)
author3 = A.create(name="Emily", first_book=book1)
author4 = A.create(name="Jane", first_book=book4)
book1.authors.add(author1, author2, author3)
book2.authors.add(author1)
book3.authors.add(author3)
book4.authors.add(author4)
# Forward
qs1 = B.prefetch_related("authors")
with self.assertNumQueries(2, using="other"):
books = "".join(
"%s (%s)\n"
% (book.title, ", ".join(a.name for a in book.authors.all()))
for book in qs1
)
self.assertEqual(
books,
"Poems (Charlotte, Anne, Emily)\n"
"Jane Eyre (Charlotte)\n"
"Wuthering Heights (Emily)\n"
"Sense and Sensibility (Jane)\n",
)
# Reverse
qs2 = A.prefetch_related("books")
with self.assertNumQueries(2, using="other"):
authors = "".join(
"%s: %s\n"
% (author.name, ", ".join(b.title for b in author.books.all()))
for author in qs2
)
self.assertEqual(
authors,
"Charlotte: Poems, Jane Eyre\n"
"Anne: Poems\n"
"Emily: Poems, Wuthering Heights\n"
"Jane: Sense and Sensibility\n",
)
def test_using_is_honored_fkey(self):
B = Book.objects.using("other")
A = Author.objects.using("other")
book1 = B.create(title="Poems")
book2 = B.create(title="Sense and Sensibility")
A.create(name="Charlotte Bronte", first_book=book1)
A.create(name="Jane Austen", first_book=book2)
# Forward
with self.assertNumQueries(2, using="other"):
books = ", ".join(
a.first_book.title for a in A.prefetch_related("first_book")
)
self.assertEqual("Poems, Sense and Sensibility", books)
# Reverse
with self.assertNumQueries(2, using="other"):
books = "".join(
"%s (%s)\n"
% (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related("first_time_authors")
)
self.assertEqual(
books,
"Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
def test_using_is_honored_inheritance(self):
B = BookWithYear.objects.using("other")
A = AuthorWithAge.objects.using("other")
book1 = B.create(title="Poems", published_year=2010)
B.create(title="More poems", published_year=2011)
A.create(name="Jane", first_book=book1, age=50)
A.create(name="Tom", first_book=book1, age=49)
# parent link
with self.assertNumQueries(2, using="other"):
authors = ", ".join(a.author.name for a in A.prefetch_related("author"))
self.assertEqual(authors, "Jane, Tom")
# child link
with self.assertNumQueries(2, using="other"):
ages = ", ".join(
str(a.authorwithage.age) for a in A.prefetch_related("authorwithage")
)
self.assertEqual(ages, "50, 49")
def test_using_is_honored_custom_qs(self):
B = Book.objects.using("other")
A = Author.objects.using("other")
book1 = B.create(title="Poems")
book2 = B.create(title="Sense and Sensibility")
A.create(name="Charlotte Bronte", first_book=book1)
A.create(name="Jane Austen", first_book=book2)
# Implicit hinting
with self.assertNumQueries(2, using="other"):
prefetch = Prefetch("first_time_authors", queryset=Author.objects.all())
books = "".join(
"%s (%s)\n"
% (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch)
)
self.assertEqual(
books,
"Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
# Explicit using on the same db.
with self.assertNumQueries(2, using="other"):
prefetch = Prefetch(
"first_time_authors", queryset=Author.objects.using("other")
)
books = "".join(
"%s (%s)\n"
% (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch)
)
self.assertEqual(
books,
"Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
# Explicit using on a different db.
with self.assertNumQueries(1, using="default"), self.assertNumQueries(
1, using="other"
):
prefetch = Prefetch(
"first_time_authors", queryset=Author.objects.using("default")
)
books = "".join(
"%s (%s)\n"
% (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch)
)
self.assertEqual(books, "Poems ()\nSense and Sensibility ()\n")
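# Illustrative sketch (not a test): a Prefetch queryset carries its own
# database routing. Without an explicit using(), the inner queryset is
# implicitly hinted to the outer queryset's database; an explicit using()
# is honored even when it names a different alias (yielding empty related
# lists here, since the data lives in "other").
def _prefetch_db_routing_sketch():
    outer = Book.objects.using("other")
    implicit = Prefetch("first_time_authors", queryset=Author.objects.all())
    explicit = Prefetch(
        "first_time_authors", queryset=Author.objects.using("default")
    )
    return list(outer.prefetch_related(implicit)), list(
        outer.prefetch_related(explicit)
    )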
class Ticket19607Tests(TestCase):
@classmethod
def setUpTestData(cls):
LessonEntry.objects.bulk_create(
LessonEntry(id=id_, name1=name1, name2=name2)
for id_, name1, name2 in [
(1, "einfach", "simple"),
(2, "schwierig", "difficult"),
]
)
WordEntry.objects.bulk_create(
WordEntry(id=id_, lesson_entry_id=lesson_entry_id, name=name)
for id_, lesson_entry_id, name in [
(1, 1, "einfach"),
(2, 1, "simple"),
(3, 2, "schwierig"),
(4, 2, "difficult"),
]
)
def test_bug(self):
list(
WordEntry.objects.prefetch_related(
"lesson_entry", "lesson_entry__wordentry_set"
)
)
class Ticket21410Tests(TestCase):
@classmethod
def setUpTestData(cls):
book1 = Book.objects.create(title="Poems")
book2 = Book.objects.create(title="Jane Eyre")
book3 = Book.objects.create(title="Wuthering Heights")
book4 = Book.objects.create(title="Sense and Sensibility")
author1 = Author2.objects.create(name="Charlotte", first_book=book1)
author2 = Author2.objects.create(name="Anne", first_book=book1)
author3 = Author2.objects.create(name="Emily", first_book=book1)
author4 = Author2.objects.create(name="Jane", first_book=book4)
author1.favorite_books.add(book1, book2, book3)
author2.favorite_books.add(book1)
author3.favorite_books.add(book2)
author4.favorite_books.add(book3)
def test_bug(self):
list(Author2.objects.prefetch_related("first_book", "favorite_books"))
class Ticket21760Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.rooms = []
for _ in range(3):
house = House.objects.create()
for _ in range(3):
cls.rooms.append(Room.objects.create(house=house))
# Set main_room for each house before creating the next one for
# databases where supports_nullable_unique_constraints is False.
house.main_room = cls.rooms[-3]
house.save()
def test_bug(self):
prefetcher = get_prefetcher(self.rooms[0], "house", "house")[0]
queryset = prefetcher.get_prefetch_queryset(list(Room.objects.all()))[0]
self.assertNotIn(" JOIN ", str(queryset.query))
class DirectPrefetchedObjectCacheReuseTests(TestCase):
"""
prefetch_related() reuses objects fetched in _prefetched_objects_cache.
When objects are prefetched and not stored as an instance attribute (often
intermediary relationships), they are saved to the
_prefetched_objects_cache attribute. prefetch_related() takes
_prefetched_objects_cache into account when determining whether an object
has been fetched [1] and retrieves results from it when it is populated [2].
[1]: #25546 (duplicate queries on nested Prefetch)
[2]: #27554 (queryset evaluation fails with a mix of nested and flattened
prefetches)
"""
@classmethod
def setUpTestData(cls):
cls.book1, cls.book2 = [
Book.objects.create(title="book1"),
Book.objects.create(title="book2"),
]
cls.author11, cls.author12, cls.author21 = [
Author.objects.create(first_book=cls.book1, name="Author11"),
Author.objects.create(first_book=cls.book1, name="Author12"),
Author.objects.create(first_book=cls.book2, name="Author21"),
]
cls.author1_address1, cls.author1_address2, cls.author2_address1 = [
AuthorAddress.objects.create(author=cls.author11, address="Happy place"),
AuthorAddress.objects.create(author=cls.author12, address="Haunted house"),
AuthorAddress.objects.create(author=cls.author21, address="Happy place"),
]
cls.bookwithyear1 = BookWithYear.objects.create(
title="Poems", published_year=2010
)
cls.bookreview1 = BookReview.objects.create(book=cls.bookwithyear1)
def test_detect_is_fetched(self):
"""
Nested prefetch_related() shouldn't trigger duplicate queries for the same
lookup.
"""
with self.assertNumQueries(3):
books = Book.objects.filter(title__in=["book1", "book2"]).prefetch_related(
Prefetch(
"first_time_authors",
Author.objects.prefetch_related(
Prefetch(
"addresses",
AuthorAddress.objects.filter(address="Happy place"),
)
),
),
)
book1, book2 = list(books)
with self.assertNumQueries(0):
self.assertSequenceEqual(
book1.first_time_authors.all(), [self.author11, self.author12]
)
self.assertSequenceEqual(book2.first_time_authors.all(), [self.author21])
self.assertSequenceEqual(
book1.first_time_authors.all()[0].addresses.all(),
[self.author1_address1],
)
self.assertSequenceEqual(
book1.first_time_authors.all()[1].addresses.all(), []
)
self.assertSequenceEqual(
book2.first_time_authors.all()[0].addresses.all(),
[self.author2_address1],
)
self.assertEqual(
list(book1.first_time_authors.all()),
list(book1.first_time_authors.all().all()),
)
self.assertEqual(
list(book2.first_time_authors.all()),
list(book2.first_time_authors.all().all()),
)
self.assertEqual(
list(book1.first_time_authors.all()[0].addresses.all()),
list(book1.first_time_authors.all()[0].addresses.all().all()),
)
self.assertEqual(
list(book1.first_time_authors.all()[1].addresses.all()),
list(book1.first_time_authors.all()[1].addresses.all().all()),
)
self.assertEqual(
list(book2.first_time_authors.all()[0].addresses.all()),
list(book2.first_time_authors.all()[0].addresses.all().all()),
)
def test_detect_is_fetched_with_to_attr(self):
with self.assertNumQueries(3):
books = Book.objects.filter(title__in=["book1", "book2"]).prefetch_related(
Prefetch(
"first_time_authors",
Author.objects.prefetch_related(
Prefetch(
"addresses",
AuthorAddress.objects.filter(address="Happy place"),
to_attr="happy_place",
)
),
to_attr="first_authors",
),
)
book1, book2 = list(books)
with self.assertNumQueries(0):
self.assertEqual(book1.first_authors, [self.author11, self.author12])
self.assertEqual(book2.first_authors, [self.author21])
self.assertEqual(
book1.first_authors[0].happy_place, [self.author1_address1]
)
self.assertEqual(book1.first_authors[1].happy_place, [])
self.assertEqual(
book2.first_authors[0].happy_place, [self.author2_address1]
)
def test_prefetch_reverse_foreign_key(self):
with self.assertNumQueries(2):
(bookwithyear1,) = BookWithYear.objects.prefetch_related("bookreview_set")
with self.assertNumQueries(0):
self.assertCountEqual(
bookwithyear1.bookreview_set.all(), [self.bookreview1]
)
with self.assertNumQueries(0):
prefetch_related_objects([bookwithyear1], "bookreview_set")
def test_add_clears_prefetched_objects(self):
bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk)
prefetch_related_objects([bookwithyear], "bookreview_set")
self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1])
new_review = BookReview.objects.create()
bookwithyear.bookreview_set.add(new_review)
self.assertCountEqual(
bookwithyear.bookreview_set.all(), [self.bookreview1, new_review]
)
def test_remove_clears_prefetched_objects(self):
bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk)
prefetch_related_objects([bookwithyear], "bookreview_set")
self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1])
bookwithyear.bookreview_set.remove(self.bookreview1)
self.assertCountEqual(bookwithyear.bookreview_set.all(), [])
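# Illustrative sketch (not a test): prefetch_related_objects() populates
# the prefetch cache on instances that were fetched earlier, and mutating
# the related set with add()/remove() clears that cache so the next all()
# call re-queries.
def _prefetch_existing_instances_sketch(book):
    prefetch_related_objects([book], "bookreview_set")
    cached = list(book.bookreview_set.all())  # Served from the cache.
    book.bookreview_set.add(BookReview.objects.create())  # Clears the cache.
    return cached, list(book.bookreview_set.all())  # Hits the database again.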
class ReadPrefetchedObjectsCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.book1 = Book.objects.create(title="Les confessions Volume I")
cls.book2 = Book.objects.create(title="Candide")
cls.author1 = AuthorWithAge.objects.create(
name="Rousseau", first_book=cls.book1, age=70
)
cls.author2 = AuthorWithAge.objects.create(
name="Voltaire", first_book=cls.book2, age=65
)
cls.book1.authors.add(cls.author1)
cls.book2.authors.add(cls.author2)
FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2)
def test_retrieves_results_from_prefetched_objects_cache(self):
"""
When intermediary results are prefetched without a destination
attribute, they are saved in the RelatedManager's cache
(_prefetched_objects_cache). prefetch_related() uses this cache
(#27554).
"""
authors = AuthorWithAge.objects.prefetch_related(
Prefetch(
"author",
queryset=Author.objects.prefetch_related(
# Results are saved in the RelatedManager's cache
# (_prefetched_objects_cache) and do not replace the
# RelatedManager on Author instances (favorite_authors)
Prefetch("favorite_authors__first_book"),
),
),
)
with self.assertNumQueries(4):
# AuthorWithAge -> Author -> FavoriteAuthors, Book
self.assertSequenceEqual(authors, [self.author1, self.author2])
class NestedPrefetchTests(TestCase):
@classmethod
def setUpTestData(cls):
house = House.objects.create(name="Big house", address="123 Main St")
cls.room = Room.objects.create(name="Kitchen", house=house)
def test_nested_prefetch_is_not_overwritten_by_related_object(self):
"""
The prefetched relationship is used rather than populating the reverse
relationship from the parent, when prefetching a set of child objects
related to a set of parent objects and the child queryset itself
specifies a prefetch back to the parent.
"""
queryset = House.objects.only("name").prefetch_related(
Prefetch(
"rooms",
queryset=Room.objects.prefetch_related(
Prefetch("house", queryset=House.objects.only("address")),
),
),
)
with self.assertNumQueries(3):
house = queryset.first()
self.assertIs(Room.house.is_cached(self.room), True)
with self.assertNumQueries(0):
house.rooms.first().house.address
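# Illustrative sketch (not a test) of the queryset shape used above: when
# the child queryset prefetches back to its parent, the nested prefetch
# (loading only "address") is kept rather than being overwritten by the
# outer objects (which load only "name").
def _nested_prefetch_sketch():
    return House.objects.only("name").prefetch_related(
        Prefetch(
            "rooms",
            queryset=Room.objects.prefetch_related(
                Prefetch("house", queryset=House.objects.only("address")),
            ),
        ),
    )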
class PrefetchLimitTests(TestDataMixin, TestCase):
@skipUnlessDBFeature("supports_over_clause")
def test_m2m_forward(self):
authors = Author.objects.all() # Meta.ordering
with self.assertNumQueries(3):
books = list(
Book.objects.prefetch_related(
Prefetch("authors", authors),
Prefetch("authors", authors[1:], to_attr="authors_sliced"),
)
)
for book in books:
with self.subTest(book=book):
self.assertEqual(book.authors_sliced, list(book.authors.all())[1:])
@skipUnlessDBFeature("supports_over_clause")
def test_m2m_reverse(self):
books = Book.objects.order_by("title")
with self.assertNumQueries(3):
authors = list(
Author.objects.prefetch_related(
Prefetch("books", books),
Prefetch("books", books[1:2], to_attr="books_sliced"),
)
)
for author in authors:
with self.subTest(author=author):
self.assertEqual(author.books_sliced, list(author.books.all())[1:2])
@skipUnlessDBFeature("supports_over_clause")
def test_foreignkey_reverse(self):
authors = Author.objects.order_by("-name")
with self.assertNumQueries(3):
books = list(
Book.objects.prefetch_related(
Prefetch(
"first_time_authors",
authors,
),
Prefetch(
"first_time_authors",
authors[1:],
to_attr="first_time_authors_sliced",
),
)
)
for book in books:
with self.subTest(book=book):
self.assertEqual(
book.first_time_authors_sliced,
list(book.first_time_authors.all())[1:],
)
@skipUnlessDBFeature("supports_over_clause")
def test_reverse_ordering(self):
authors = Author.objects.reverse() # Reverse Meta.ordering
with self.assertNumQueries(3):
books = list(
Book.objects.prefetch_related(
Prefetch("authors", authors),
Prefetch("authors", authors[1:], to_attr="authors_sliced"),
)
)
for book in books:
with self.subTest(book=book):
self.assertEqual(book.authors_sliced, list(book.authors.all())[1:])
@skipIfDBFeature("supports_over_clause")
def test_window_not_supported(self):
authors = Author.objects.all()
msg = (
"Prefetching from a limited queryset is only supported on backends that "
"support window functions."
)
with self.assertRaisesMessage(NotSupportedError, msg):
list(Book.objects.prefetch_related(Prefetch("authors", authors[1:])))
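# Illustrative sketch (not a test): on backends that support window
# functions, a sliced queryset may be used inside Prefetch(); to_attr
# keeps the sliced result separate from the unsliced manager cache.
def _sliced_prefetch_sketch():
    authors = Author.objects.order_by("name")
    return list(
        Book.objects.prefetch_related(
            Prefetch("authors", authors[1:3], to_attr="authors_sliced"),
        )
    )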
|
eab083958e9381e3035998aba2fe07439d23827b5fd5fff15a5dc9d493fdd7a8 | from django.db import DatabaseError, connection
from django.db.models import Index
from django.test import TransactionTestCase, skipUnlessDBFeature
from .models import (
Article,
ArticleReporter,
CheckConstraintModel,
City,
Comment,
Country,
DbCommentModel,
District,
Reporter,
UniqueConstraintConditionModel,
)
class IntrospectionTests(TransactionTestCase):
available_apps = ["introspection"]
def test_table_names(self):
tl = connection.introspection.table_names()
self.assertEqual(tl, sorted(tl))
self.assertIn(
Reporter._meta.db_table,
tl,
"'%s' isn't in table_list()." % Reporter._meta.db_table,
)
self.assertIn(
Article._meta.db_table,
tl,
"'%s' isn't in table_list()." % Article._meta.db_table,
)
def test_django_table_names(self):
with connection.cursor() as cursor:
cursor.execute("CREATE TABLE django_ixn_test_table (id INTEGER);")
tl = connection.introspection.django_table_names()
cursor.execute("DROP TABLE django_ixn_test_table;")
self.assertNotIn(
"django_ixn_test_table",
tl,
"django_table_names() returned a non-Django table",
)
def test_django_table_names_retval_type(self):
# django_table_names() returns a list (#15216).
tl = connection.introspection.django_table_names(only_existing=True)
self.assertIs(type(tl), list)
tl = connection.introspection.django_table_names(only_existing=False)
self.assertIs(type(tl), list)
def test_table_names_with_views(self):
with connection.cursor() as cursor:
try:
cursor.execute(
"CREATE VIEW introspection_article_view AS SELECT headline "
"from introspection_article;"
)
except DatabaseError as e:
if "insufficient privileges" in str(e):
self.fail("The test user has no CREATE VIEW privileges")
else:
raise
try:
self.assertIn(
"introspection_article_view",
connection.introspection.table_names(include_views=True),
)
self.assertNotIn(
"introspection_article_view", connection.introspection.table_names()
)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP VIEW introspection_article_view")
def test_unmanaged_through_model(self):
tables = connection.introspection.django_table_names()
self.assertNotIn(ArticleReporter._meta.db_table, tables)
def test_installed_models(self):
tables = [Article._meta.db_table, Reporter._meta.db_table]
models = connection.introspection.installed_models(tables)
self.assertEqual(models, {Article, Reporter})
def test_sequence_list(self):
sequences = connection.introspection.sequence_list()
reporter_seqs = [
seq for seq in sequences if seq["table"] == Reporter._meta.db_table
]
self.assertEqual(
len(reporter_seqs), 1, "Reporter sequence not found in sequence_list()"
)
self.assertEqual(reporter_seqs[0]["column"], "id")
def test_get_table_description_names(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, Reporter._meta.db_table
)
self.assertEqual(
[r[0] for r in desc], [f.column for f in Reporter._meta.fields]
)
def test_get_table_description_types(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, Reporter._meta.db_table
)
self.assertEqual(
[connection.introspection.get_field_type(r[1], r) for r in desc],
[
connection.features.introspected_field_types[field]
for field in (
"AutoField",
"CharField",
"CharField",
"CharField",
"BigIntegerField",
"BinaryField",
"SmallIntegerField",
"DurationField",
)
],
)
def test_get_table_description_col_lengths(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, Reporter._meta.db_table
)
self.assertEqual(
[
r[2]
for r in desc
if connection.introspection.get_field_type(r[1], r) == "CharField"
],
[30, 30, 254],
)
def test_get_table_description_nullable(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, Reporter._meta.db_table
)
nullable_by_backend = connection.features.interprets_empty_strings_as_nulls
self.assertEqual(
[r[6] for r in desc],
[
False,
nullable_by_backend,
nullable_by_backend,
nullable_by_backend,
True,
True,
False,
False,
],
)
def test_bigautofield(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, City._meta.db_table
)
self.assertIn(
connection.features.introspected_field_types["BigAutoField"],
[connection.introspection.get_field_type(r[1], r) for r in desc],
)
def test_smallautofield(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, Country._meta.db_table
)
self.assertIn(
connection.features.introspected_field_types["SmallAutoField"],
[connection.introspection.get_field_type(r[1], r) for r in desc],
)
@skipUnlessDBFeature("supports_comments")
def test_db_comments(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(
cursor, DbCommentModel._meta.db_table
)
table_list = connection.introspection.get_table_list(cursor)
self.assertEqual(
["'Name' column comment"],
[field.comment for field in desc if field.name == "name"],
)
self.assertEqual(
["Custom table comment"],
[
table.comment
for table in table_list
if table.name == "introspection_dbcommentmodel"
],
)
# Regression test for #9991 - 'real' types in PostgreSQL.
@skipUnlessDBFeature("has_real_datatype")
def test_postgresql_real_type(self):
with connection.cursor() as cursor:
cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);")
desc = connection.introspection.get_table_description(
cursor, "django_ixn_real_test_table"
)
cursor.execute("DROP TABLE django_ixn_real_test_table;")
self.assertEqual(
connection.introspection.get_field_type(desc[0][1], desc[0]), "FloatField"
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_get_relations(self):
with connection.cursor() as cursor:
relations = connection.introspection.get_relations(
cursor, Article._meta.db_table
)
# That's {field_name: (field_name_other_table, other_table)}
expected_relations = {
"reporter_id": ("id", Reporter._meta.db_table),
"response_to_id": ("id", Article._meta.db_table),
}
self.assertEqual(relations, expected_relations)
# Removing a field shouldn't disturb get_relations (#17785)
body = Article._meta.get_field("body")
with connection.schema_editor() as editor:
editor.remove_field(Article, body)
with connection.cursor() as cursor:
relations = connection.introspection.get_relations(
cursor, Article._meta.db_table
)
with connection.schema_editor() as editor:
editor.add_field(Article, body)
self.assertEqual(relations, expected_relations)
def test_get_primary_key_column(self):
with connection.cursor() as cursor:
primary_key_column = connection.introspection.get_primary_key_column(
cursor, Article._meta.db_table
)
pk_fk_column = connection.introspection.get_primary_key_column(
cursor, District._meta.db_table
)
self.assertEqual(primary_key_column, "id")
self.assertEqual(pk_fk_column, "city_id")
def test_get_constraints_index_types(self):
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor, Article._meta.db_table
)
index = {}
index2 = {}
for val in constraints.values():
if val["columns"] == ["headline", "pub_date"]:
index = val
if val["columns"] == [
"headline",
"response_to_id",
"pub_date",
"reporter_id",
]:
index2 = val
self.assertEqual(index["type"], Index.suffix)
self.assertEqual(index2["type"], Index.suffix)
@skipUnlessDBFeature("supports_index_column_ordering")
def test_get_constraints_indexes_orders(self):
"""
Indexes have the 'orders' key with a list of 'ASC'/'DESC' values.
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor, Article._meta.db_table
)
indexes_verified = 0
expected_columns = [
["headline", "pub_date"],
["headline", "response_to_id", "pub_date", "reporter_id"],
]
if connection.features.indexes_foreign_keys:
expected_columns += [
["reporter_id"],
["response_to_id"],
]
for val in constraints.values():
if val["index"] and not (val["primary_key"] or val["unique"]):
self.assertIn(val["columns"], expected_columns)
self.assertEqual(val["orders"], ["ASC"] * len(val["columns"]))
indexes_verified += 1
self.assertEqual(indexes_verified, len(expected_columns))
@skipUnlessDBFeature("supports_index_column_ordering", "supports_partial_indexes")
def test_get_constraints_unique_indexes_orders(self):
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor,
UniqueConstraintConditionModel._meta.db_table,
)
self.assertIn("cond_name_without_color_uniq", constraints)
constraint = constraints["cond_name_without_color_uniq"]
self.assertIs(constraint["unique"], True)
self.assertEqual(constraint["columns"], ["name"])
self.assertEqual(constraint["orders"], ["ASC"])
def test_get_constraints(self):
def assertDetails(
details,
cols,
primary_key=False,
unique=False,
index=False,
check=False,
foreign_key=None,
):
# Different backends have different values for same constraints:
#              PRIMARY KEY        UNIQUE CONSTRAINT  UNIQUE INDEX
# MySQL        pk=1 uniq=1 idx=1  pk=0 uniq=1 idx=1  pk=0 uniq=1 idx=1
# PostgreSQL   pk=1 uniq=1 idx=0  pk=0 uniq=1 idx=0  pk=0 uniq=1 idx=1
# SQLite       pk=1 uniq=0 idx=0  pk=0 uniq=1 idx=0  pk=0 uniq=1 idx=1
if details["primary_key"]:
details["unique"] = True
if details["unique"]:
details["index"] = False
self.assertEqual(details["columns"], cols)
self.assertEqual(details["primary_key"], primary_key)
self.assertEqual(details["unique"], unique)
self.assertEqual(details["index"], index)
self.assertEqual(details["check"], check)
self.assertEqual(details["foreign_key"], foreign_key)
# Test custom constraints
custom_constraints = {
"article_email_pub_date_uniq",
"email_pub_date_idx",
}
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor, Comment._meta.db_table
)
if (
connection.features.supports_column_check_constraints
and connection.features.can_introspect_check_constraints
):
constraints.update(
connection.introspection.get_constraints(
cursor, CheckConstraintModel._meta.db_table
)
)
custom_constraints.add("up_votes_gte_0_check")
assertDetails(
constraints["up_votes_gte_0_check"], ["up_votes"], check=True
)
assertDetails(
constraints["article_email_pub_date_uniq"],
["article_id", "email", "pub_date"],
unique=True,
)
assertDetails(
constraints["email_pub_date_idx"], ["email", "pub_date"], index=True
)
# Test field constraints
field_constraints = set()
for name, details in constraints.items():
if name in custom_constraints:
continue
elif details["columns"] == ["up_votes"] and details["check"]:
assertDetails(details, ["up_votes"], check=True)
field_constraints.add(name)
elif details["columns"] == ["voting_number"] and details["check"]:
assertDetails(details, ["voting_number"], check=True)
field_constraints.add(name)
elif details["columns"] == ["ref"] and details["unique"]:
assertDetails(details, ["ref"], unique=True)
field_constraints.add(name)
elif details["columns"] == ["voting_number"] and details["unique"]:
assertDetails(details, ["voting_number"], unique=True)
field_constraints.add(name)
elif details["columns"] == ["article_id"] and details["index"]:
assertDetails(details, ["article_id"], index=True)
field_constraints.add(name)
elif details["columns"] == ["id"] and details["primary_key"]:
assertDetails(details, ["id"], primary_key=True, unique=True)
field_constraints.add(name)
elif details["columns"] == ["article_id"] and details["foreign_key"]:
assertDetails(
details, ["article_id"], foreign_key=("introspection_article", "id")
)
field_constraints.add(name)
elif details["check"]:
# Some databases (e.g. Oracle) include additional check
# constraints.
field_constraints.add(name)
# All constraints are accounted for.
self.assertEqual(
constraints.keys() ^ (custom_constraints | field_constraints), set()
)
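# Illustrative sketch (not a test): get_constraints() maps constraint
# names to detail dicts with keys such as "columns", "primary_key",
# "unique", "index", "check", and "foreign_key".
def _unique_constraints_sketch(table_name):
    with connection.cursor() as cursor:
        constraints = connection.introspection.get_constraints(cursor, table_name)
    return {
        name: details["columns"]
        for name, details in constraints.items()
        if details["unique"] or details["primary_key"]
    }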
|
70f7b152acc1fef1e8056a4fb28a3f3837484a40bf75c6acaabdb29dc89b2efe | from django.db import models
class City(models.Model):
id = models.BigAutoField(primary_key=True)
name = models.CharField(max_length=50)
class Country(models.Model):
id = models.SmallAutoField(primary_key=True)
name = models.CharField(max_length=50)
class District(models.Model):
city = models.ForeignKey(City, models.CASCADE, primary_key=True)
name = models.CharField(max_length=50)
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
facebook_user_id = models.BigIntegerField(null=True)
raw_data = models.BinaryField(null=True)
small_int = models.SmallIntegerField()
interval = models.DurationField()
class Meta:
unique_together = ("first_name", "last_name")
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
body = models.TextField(default="")
reporter = models.ForeignKey(Reporter, models.CASCADE)
response_to = models.ForeignKey("self", models.SET_NULL, null=True)
unmanaged_reporters = models.ManyToManyField(
Reporter, through="ArticleReporter", related_name="+"
)
class Meta:
ordering = ("headline",)
indexes = [
models.Index(fields=["headline", "pub_date"]),
models.Index(fields=["headline", "response_to", "pub_date", "reporter"]),
]
class ArticleReporter(models.Model):
article = models.ForeignKey(Article, models.CASCADE)
reporter = models.ForeignKey(Reporter, models.CASCADE)
class Meta:
managed = False
class Comment(models.Model):
ref = models.UUIDField(unique=True)
article = models.ForeignKey(Article, models.CASCADE, db_index=True)
email = models.EmailField()
pub_date = models.DateTimeField()
body = models.TextField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=["article", "email", "pub_date"],
name="article_email_pub_date_uniq",
),
]
indexes = [
models.Index(fields=["email", "pub_date"], name="email_pub_date_idx"),
]
class CheckConstraintModel(models.Model):
up_votes = models.PositiveIntegerField()
voting_number = models.PositiveIntegerField(unique=True)
class Meta:
required_db_features = {
"supports_table_check_constraints",
}
constraints = [
models.CheckConstraint(
name="up_votes_gte_0_check", check=models.Q(up_votes__gte=0)
),
]
class UniqueConstraintConditionModel(models.Model):
name = models.CharField(max_length=255)
color = models.CharField(max_length=32, null=True)
class Meta:
required_db_features = {"supports_partial_indexes"}
constraints = [
models.UniqueConstraint(
fields=["name"],
name="cond_name_without_color_uniq",
condition=models.Q(color__isnull=True),
),
]
class DbCommentModel(models.Model):
name = models.CharField(max_length=15, db_comment="'Name' column comment")
class Meta:
db_table_comment = "Custom table comment"
required_db_features = {"supports_comments"}
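# Illustrative sketch (comments only): the partial unique constraint on
# UniqueConstraintConditionModel applies only to rows where color is NULL,
# so duplicate names are allowed once color is set:
#
#     UniqueConstraintConditionModel.objects.create(name="a")             # OK
#     UniqueConstraintConditionModel.objects.create(name="a", color="c")  # OK
#     UniqueConstraintConditionModel.objects.create(name="a")  # IntegrityError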
|
3528abeebffa85c7b0a88d4888306f4f191a9dc397037af684f7f229aae4ae89 | import datetime
import decimal
import gettext as gettext_module
import os
import pickle
import re
import tempfile
from contextlib import contextmanager
from importlib import import_module
from pathlib import Path
from unittest import mock
from asgiref.local import Local
from django import forms
from django.apps import AppConfig
from django.conf import settings
from django.conf.locale import LANG_INFO
from django.conf.urls.i18n import i18n_patterns
from django.template import Context, Template
from django.test import (
RequestFactory,
SimpleTestCase,
TestCase,
ignore_warnings,
override_settings,
)
from django.utils import translation
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.formats import (
date_format,
get_format,
iter_format_modules,
localize,
localize_input,
reset_format_cache,
sanitize_separators,
sanitize_strftime_format,
time_format,
)
from django.utils.numberformat import format as nformat
from django.utils.safestring import SafeString, mark_safe
from django.utils.translation import (
activate,
check_for_language,
deactivate,
get_language,
get_language_bidi,
get_language_from_request,
get_language_info,
gettext,
gettext_lazy,
ngettext,
ngettext_lazy,
npgettext,
npgettext_lazy,
pgettext,
round_away_from_one,
to_language,
to_locale,
trans_null,
trans_real,
)
from django.utils.translation.reloader import (
translation_file_changed,
watch_for_translation_changes,
)
from .forms import CompanyForm, I18nForm, SelectDateForm
from .models import Company, TestModel
here = os.path.dirname(os.path.abspath(__file__))
extended_locale_paths = settings.LOCALE_PATHS + [
os.path.join(here, "other", "locale"),
]
class AppModuleStub:
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
@contextmanager
def patch_formats(lang, **settings):
from django.utils.formats import _format_cache
# Populate _format_cache with temporary values
for key, value in settings.items():
_format_cache[(key, lang)] = value
try:
yield
finally:
reset_format_cache()
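# Illustrative usage sketch (not a test) of the helper above: temporarily
# plant a fake value in the format cache for one language; on exit,
# reset_format_cache() restores the real formats.
def _patch_formats_sketch():
    with patch_formats("fr", DECIMAL_SEPARATOR="@"):
        return get_format("DECIMAL_SEPARATOR", lang="fr")  # "@"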
class TranslationTests(SimpleTestCase):
@translation.override("fr")
def test_plural(self):
"""
Test plurals with ngettext. French differs from English in that 0 is singular.
"""
self.assertEqual(
ngettext("%(num)d year", "%(num)d years", 0) % {"num": 0},
"0 année",
)
self.assertEqual(
ngettext("%(num)d year", "%(num)d years", 2) % {"num": 2},
"2 années",
)
self.assertEqual(
ngettext("%(size)d byte", "%(size)d bytes", 0) % {"size": 0}, "0 octet"
)
self.assertEqual(
ngettext("%(size)d byte", "%(size)d bytes", 2) % {"size": 2}, "2 octets"
)
def test_plural_null(self):
g = trans_null.ngettext
self.assertEqual(g("%(num)d year", "%(num)d years", 0) % {"num": 0}, "0 years")
self.assertEqual(g("%(num)d year", "%(num)d years", 1) % {"num": 1}, "1 year")
self.assertEqual(g("%(num)d year", "%(num)d years", 2) % {"num": 2}, "2 years")
@override_settings(LOCALE_PATHS=extended_locale_paths)
@translation.override("fr")
def test_multiple_plurals_per_language(self):
"""
Normally, French has 2 plurals. As other/locale/fr/LC_MESSAGES/django.po
has a different plural equation with 3 plurals, this tests that those
plurals are honored.
"""
self.assertEqual(ngettext("%d singular", "%d plural", 0) % 0, "0 pluriel1")
self.assertEqual(ngettext("%d singular", "%d plural", 1) % 1, "1 singulier")
self.assertEqual(ngettext("%d singular", "%d plural", 2) % 2, "2 pluriel2")
french = trans_real.catalog()
# Internal _catalog can query subcatalogs (from different po files).
self.assertEqual(french._catalog[("%d singular", 0)], "%d singulier")
self.assertEqual(french._catalog[("%(num)d hour", 0)], "%(num)d heure")
def test_override(self):
activate("de")
try:
with translation.override("pl"):
self.assertEqual(get_language(), "pl")
self.assertEqual(get_language(), "de")
with translation.override(None):
self.assertIsNone(get_language())
with translation.override("pl"):
pass
self.assertIsNone(get_language())
self.assertEqual(get_language(), "de")
finally:
deactivate()
def test_override_decorator(self):
@translation.override("pl")
def func_pl():
self.assertEqual(get_language(), "pl")
@translation.override(None)
def func_none():
self.assertIsNone(get_language())
try:
activate("de")
func_pl()
self.assertEqual(get_language(), "de")
func_none()
self.assertEqual(get_language(), "de")
finally:
deactivate()
def test_override_exit(self):
"""
The language restored is the one used when the function was
called, not the one used when the decorator was initialized (#23381).
"""
activate("fr")
@translation.override("pl")
def func_pl():
pass
deactivate()
try:
activate("en")
func_pl()
self.assertEqual(get_language(), "en")
finally:
deactivate()
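    def _override_sketch(self):
        # Illustrative sketch (not a test): override() works as a context
        # manager and as a decorator, restoring whichever language was
        # active when it was entered.
        activate("de")
        with translation.override("pl"):
            assert get_language() == "pl"
        assert get_language() == "de"
        deactivate()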
def test_lazy_objects(self):
"""
Format string interpolation should work with *_lazy objects.
"""
s = gettext_lazy("Add %(name)s")
d = {"name": "Ringo"}
self.assertEqual("Add Ringo", s % d)
with translation.override("de", deactivate=True):
self.assertEqual("Ringo hinzuf\xfcgen", s % d)
with translation.override("pl"):
self.assertEqual("Dodaj Ringo", s % d)
# It should be possible to compare *_lazy objects.
s1 = gettext_lazy("Add %(name)s")
self.assertEqual(s, s1)
s2 = gettext_lazy("Add %(name)s")
s3 = gettext_lazy("Add %(name)s")
self.assertEqual(s2, s3)
self.assertEqual(s, s2)
s4 = gettext_lazy("Some other string")
self.assertNotEqual(s, s4)
def test_lazy_pickle(self):
s1 = gettext_lazy("test")
self.assertEqual(str(s1), "test")
s2 = pickle.loads(pickle.dumps(s1))
self.assertEqual(str(s2), "test")
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_ngettext_lazy(self):
simple_with_format = ngettext_lazy("%d good result", "%d good results")
simple_context_with_format = npgettext_lazy(
"Exclamation", "%d good result", "%d good results"
)
simple_without_format = ngettext_lazy("good result", "good results")
with translation.override("de"):
self.assertEqual(simple_with_format % 1, "1 gutes Resultat")
self.assertEqual(simple_with_format % 4, "4 guten Resultate")
self.assertEqual(simple_context_with_format % 1, "1 gutes Resultat!")
self.assertEqual(simple_context_with_format % 4, "4 guten Resultate!")
self.assertEqual(simple_without_format % 1, "gutes Resultat")
self.assertEqual(simple_without_format % 4, "guten Resultate")
complex_nonlazy = ngettext_lazy(
"Hi %(name)s, %(num)d good result", "Hi %(name)s, %(num)d good results", 4
)
complex_deferred = ngettext_lazy(
"Hi %(name)s, %(num)d good result",
"Hi %(name)s, %(num)d good results",
"num",
)
complex_context_nonlazy = npgettext_lazy(
"Greeting",
"Hi %(name)s, %(num)d good result",
"Hi %(name)s, %(num)d good results",
4,
)
complex_context_deferred = npgettext_lazy(
"Greeting",
"Hi %(name)s, %(num)d good result",
"Hi %(name)s, %(num)d good results",
"num",
)
with translation.override("de"):
self.assertEqual(
complex_nonlazy % {"num": 4, "name": "Jim"},
"Hallo Jim, 4 guten Resultate",
)
self.assertEqual(
complex_deferred % {"name": "Jim", "num": 1},
"Hallo Jim, 1 gutes Resultat",
)
self.assertEqual(
complex_deferred % {"name": "Jim", "num": 5},
"Hallo Jim, 5 guten Resultate",
)
with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"):
complex_deferred % {"name": "Jim"}
self.assertEqual(
complex_context_nonlazy % {"num": 4, "name": "Jim"},
"Willkommen Jim, 4 guten Resultate",
)
self.assertEqual(
complex_context_deferred % {"name": "Jim", "num": 1},
"Willkommen Jim, 1 gutes Resultat",
)
self.assertEqual(
complex_context_deferred % {"name": "Jim", "num": 5},
"Willkommen Jim, 5 guten Resultate",
)
with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"):
complex_context_deferred % {"name": "Jim"}
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_ngettext_lazy_format_style(self):
simple_with_format = ngettext_lazy("{} good result", "{} good results")
simple_context_with_format = npgettext_lazy(
"Exclamation", "{} good result", "{} good results"
)
with translation.override("de"):
self.assertEqual(simple_with_format.format(1), "1 gutes Resultat")
self.assertEqual(simple_with_format.format(4), "4 guten Resultate")
self.assertEqual(simple_context_with_format.format(1), "1 gutes Resultat!")
self.assertEqual(simple_context_with_format.format(4), "4 guten Resultate!")
complex_nonlazy = ngettext_lazy(
"Hi {name}, {num} good result", "Hi {name}, {num} good results", 4
)
complex_deferred = ngettext_lazy(
"Hi {name}, {num} good result", "Hi {name}, {num} good results", "num"
)
complex_context_nonlazy = npgettext_lazy(
"Greeting",
"Hi {name}, {num} good result",
"Hi {name}, {num} good results",
4,
)
complex_context_deferred = npgettext_lazy(
"Greeting",
"Hi {name}, {num} good result",
"Hi {name}, {num} good results",
"num",
)
with translation.override("de"):
self.assertEqual(
complex_nonlazy.format(num=4, name="Jim"),
"Hallo Jim, 4 guten Resultate",
)
self.assertEqual(
complex_deferred.format(name="Jim", num=1),
"Hallo Jim, 1 gutes Resultat",
)
self.assertEqual(
complex_deferred.format(name="Jim", num=5),
"Hallo Jim, 5 guten Resultate",
)
with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"):
complex_deferred.format(name="Jim")
self.assertEqual(
complex_context_nonlazy.format(num=4, name="Jim"),
"Willkommen Jim, 4 guten Resultate",
)
self.assertEqual(
complex_context_deferred.format(name="Jim", num=1),
"Willkommen Jim, 1 gutes Resultat",
)
self.assertEqual(
complex_context_deferred.format(name="Jim", num=5),
"Willkommen Jim, 5 guten Resultate",
)
with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"):
complex_context_deferred.format(name="Jim")
def test_ngettext_lazy_bool(self):
self.assertTrue(ngettext_lazy("%d good result", "%d good results"))
self.assertFalse(ngettext_lazy("", ""))
def test_ngettext_lazy_pickle(self):
s1 = ngettext_lazy("%d good result", "%d good results")
self.assertEqual(s1 % 1, "1 good result")
self.assertEqual(s1 % 8, "8 good results")
s2 = pickle.loads(pickle.dumps(s1))
self.assertEqual(s2 % 1, "1 good result")
self.assertEqual(s2 % 8, "8 good results")
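    def _deferred_ngettext_lazy_sketch(self):
        # Illustrative sketch (not a test): passing a key name instead of
        # a number defers plural selection until interpolation time. With
        # no translation active, this yields "1 item" and "3 items".
        msg = ngettext_lazy("%(num)d item", "%(num)d items", "num")
        return msg % {"num": 1}, msg % {"num": 3}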
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_pgettext(self):
trans_real._active = Local()
trans_real._translations = {}
with translation.override("de"):
self.assertEqual(pgettext("unexisting", "May"), "May")
self.assertEqual(pgettext("month name", "May"), "Mai")
self.assertEqual(pgettext("verb", "May"), "Kann")
self.assertEqual(
npgettext("search", "%d result", "%d results", 4) % 4, "4 Resultate"
)
def test_empty_value(self):
"""Empty value must stay empty after being translated (#23196)."""
with translation.override("de"):
self.assertEqual("", gettext(""))
s = mark_safe("")
self.assertEqual(s, gettext(s))
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_safe_status(self):
"""
Translating a string requiring no auto-escaping with gettext or pgettext
shouldn't change the "safe" status.
"""
trans_real._active = Local()
trans_real._translations = {}
s1 = mark_safe("Password")
s2 = mark_safe("May")
with translation.override("de", deactivate=True):
self.assertIs(type(gettext(s1)), SafeString)
self.assertIs(type(pgettext("month name", s2)), SafeString)
self.assertEqual("aPassword", SafeString("a") + s1)
self.assertEqual("Passworda", s1 + SafeString("a"))
self.assertEqual("Passworda", s1 + mark_safe("a"))
self.assertEqual("aPassword", mark_safe("a") + s1)
self.assertEqual("as", mark_safe("a") + mark_safe("s"))
def test_maclines(self):
"""
Translations in files with Mac or DOS line endings are converted to
Unix line endings in .po catalogs.
"""
ca_translation = trans_real.translation("ca")
ca_translation._catalog["Mac\nEOF\n"] = "Catalan Mac\nEOF\n"
ca_translation._catalog["Win\nEOF\n"] = "Catalan Win\nEOF\n"
with translation.override("ca", deactivate=True):
self.assertEqual("Catalan Mac\nEOF\n", gettext("Mac\rEOF\r"))
self.assertEqual("Catalan Win\nEOF\n", gettext("Win\r\nEOF\r\n"))
def test_to_locale(self):
tests = (
("en", "en"),
("EN", "en"),
("en-us", "en_US"),
("EN-US", "en_US"),
("en_US", "en_US"),
# With > 2 characters after the dash.
("sr-latn", "sr_Latn"),
("sr-LATN", "sr_Latn"),
("sr_Latn", "sr_Latn"),
# 3-char language codes.
("ber-MA", "ber_MA"),
("BER-MA", "ber_MA"),
("BER_MA", "ber_MA"),
("ber_MA", "ber_MA"),
# With private use subtag (x-informal).
("nl-nl-x-informal", "nl_NL-x-informal"),
("NL-NL-X-INFORMAL", "nl_NL-x-informal"),
("sr-latn-x-informal", "sr_Latn-x-informal"),
("SR-LATN-X-INFORMAL", "sr_Latn-x-informal"),
)
for lang, locale in tests:
with self.subTest(lang=lang):
self.assertEqual(to_locale(lang), locale)
def test_to_language(self):
self.assertEqual(to_language("en_US"), "en-us")
self.assertEqual(to_language("sr_Lat"), "sr-lat")
def test_language_bidi(self):
self.assertIs(get_language_bidi(), False)
with translation.override(None):
self.assertIs(get_language_bidi(), False)
def test_language_bidi_null(self):
self.assertIs(trans_null.get_language_bidi(), False)
with override_settings(LANGUAGE_CODE="he"):
self.assertIs(get_language_bidi(), True)
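# Illustrative sketch (not a test): to_locale() normalizes a language
# code's case and separators, and to_language() converts a locale name
# back to a language code.
def _locale_roundtrip_sketch():
    return to_locale("sr-LATN"), to_language("en_US")  # ("sr_Latn", "en-us")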
class TranslationLoadingTests(SimpleTestCase):
def setUp(self):
"""Clear translation state."""
self._old_language = get_language()
self._old_translations = trans_real._translations
deactivate()
trans_real._translations = {}
def tearDown(self):
trans_real._translations = self._old_translations
activate(self._old_language)
@override_settings(
USE_I18N=True,
LANGUAGE_CODE="en",
LANGUAGES=[
("en", "English"),
("en-ca", "English (Canada)"),
("en-nz", "English (New Zealand)"),
("en-au", "English (Australia)"),
],
LOCALE_PATHS=[os.path.join(here, "loading")],
INSTALLED_APPS=["i18n.loading_app"],
)
def test_translation_loading(self):
"""
"loading_app" does not have translations for all languages provided by
"loading". Catalogs are merged correctly.
"""
tests = [
("en", "local country person"),
("en_AU", "aussie"),
("en_NZ", "kiwi"),
("en_CA", "canuck"),
]
# Load all relevant translations.
for language, _ in tests:
activate(language)
# Catalogs are merged correctly.
for language, nickname in tests:
with self.subTest(language=language):
activate(language)
self.assertEqual(gettext("local country person"), nickname)
class TranslationThreadSafetyTests(SimpleTestCase):
def setUp(self):
self._old_language = get_language()
self._translations = trans_real._translations
# Rely on .split() being called inside _fetch() in
# trans_real.translation().
class sideeffect_str(str):
def split(self, *args, **kwargs):
res = str.split(self, *args, **kwargs)
trans_real._translations["en-YY"] = None
return res
trans_real._translations = {sideeffect_str("en-XX"): None}
def tearDown(self):
trans_real._translations = self._translations
activate(self._old_language)
def test_bug14894_translation_activate_thread_safety(self):
translation_count = len(trans_real._translations)
# May raise RuntimeError if translation.activate() isn't thread-safe.
translation.activate("pl")
# make sure sideeffect_str actually added a new translation
self.assertLess(translation_count, len(trans_real._translations))
class FormattingTests(SimpleTestCase):
def setUp(self):
super().setUp()
self.n = decimal.Decimal("66666.666")
self.f = 99999.999
self.d = datetime.date(2009, 12, 31)
self.dt = datetime.datetime(2009, 12, 31, 20, 50)
self.t = datetime.time(10, 15, 48)
self.long = 10000
self.ctxt = Context(
{
"n": self.n,
"t": self.t,
"d": self.d,
"dt": self.dt,
"f": self.f,
"l": self.long,
}
)
def test_all_format_strings(self):
all_locales = LANG_INFO.keys()
some_date = datetime.date(2017, 10, 14)
some_datetime = datetime.datetime(2017, 10, 14, 10, 23)
for locale in all_locales:
with self.subTest(locale=locale), translation.override(locale):
self.assertIn(
"2017", date_format(some_date)
) # Uses DATE_FORMAT by default
self.assertIn(
"23", time_format(some_datetime)
) # Uses TIME_FORMAT by default
self.assertIn("2017", date_format(some_datetime, "DATETIME_FORMAT"))
self.assertIn("2017", date_format(some_date, "YEAR_MONTH_FORMAT"))
self.assertIn("14", date_format(some_date, "MONTH_DAY_FORMAT"))
self.assertIn("2017", date_format(some_date, "SHORT_DATE_FORMAT"))
self.assertIn(
"2017",
date_format(some_datetime, "SHORT_DATETIME_FORMAT"),
)
def test_locale_independent(self):
"""
Locale-independent number formatting with explicit separators, plus
date/time filters with literal format strings.
"""
with self.settings(USE_THOUSAND_SEPARATOR=False):
self.assertEqual(
"66666.66",
nformat(
self.n, decimal_sep=".", decimal_pos=2, grouping=3, thousand_sep=","
),
)
self.assertEqual(
"66666A6",
nformat(
self.n, decimal_sep="A", decimal_pos=1, grouping=1, thousand_sep="B"
),
)
self.assertEqual(
"66666",
nformat(
self.n, decimal_sep="X", decimal_pos=0, grouping=1, thousand_sep="Y"
),
)
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual(
"66,666.66",
nformat(
self.n, decimal_sep=".", decimal_pos=2, grouping=3, thousand_sep=","
),
)
self.assertEqual(
"6B6B6B6B6A6",
nformat(
self.n, decimal_sep="A", decimal_pos=1, grouping=1, thousand_sep="B"
),
)
self.assertEqual(
"-66666.6", nformat(-66666.666, decimal_sep=".", decimal_pos=1)
)
self.assertEqual(
"-66666.0", nformat(int("-66666"), decimal_sep=".", decimal_pos=1)
)
self.assertEqual(
"10000.0", nformat(self.long, decimal_sep=".", decimal_pos=1)
)
self.assertEqual(
"10,00,00,000.00",
nformat(
100000000.00,
decimal_sep=".",
decimal_pos=2,
grouping=(3, 2, 0),
thousand_sep=",",
),
)
self.assertEqual(
"1,0,00,000,0000.00",
nformat(
10000000000.00,
decimal_sep=".",
decimal_pos=2,
grouping=(4, 3, 2, 1, 0),
thousand_sep=",",
),
)
self.assertEqual(
"10000,00,000.00",
nformat(
1000000000.00,
decimal_sep=".",
decimal_pos=2,
grouping=(3, 2, -1),
thousand_sep=",",
),
)
# This unusual grouping/force_grouping combination may be triggered
# by the intcomma filter.
self.assertEqual(
"10000",
nformat(
self.long,
decimal_sep=".",
decimal_pos=0,
grouping=0,
force_grouping=True,
),
)
# date filter
self.assertEqual(
"31.12.2009 в 20:50",
Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt),
)
self.assertEqual(
"⌚ 10:15", Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt)
)
@ignore_warnings(category=RemovedInDjango50Warning)
@override_settings(USE_L10N=False)
def test_l10n_disabled(self):
"""
With format localization disabled, the Catalan locale's translations
are used, but not its formats.
"""
with translation.override("ca", deactivate=True):
self.maxDiff = 3000
self.assertEqual("N j, Y", get_format("DATE_FORMAT"))
self.assertEqual(0, get_format("FIRST_DAY_OF_WEEK"))
self.assertEqual(".", get_format("DECIMAL_SEPARATOR"))
self.assertEqual("10:15 a.m.", time_format(self.t))
self.assertEqual("Des. 31, 2009", date_format(self.d))
self.assertEqual("desembre 2009", date_format(self.d, "YEAR_MONTH_FORMAT"))
self.assertEqual(
"12/31/2009 8:50 p.m.", date_format(self.dt, "SHORT_DATETIME_FORMAT")
)
self.assertEqual("No localizable", localize("No localizable"))
self.assertEqual("66666.666", localize(self.n))
self.assertEqual("99999.999", localize(self.f))
self.assertEqual("10000", localize(self.long))
self.assertEqual("Des. 31, 2009", localize(self.d))
self.assertEqual("Des. 31, 2009, 8:50 p.m.", localize(self.dt))
self.assertEqual("66666.666", Template("{{ n }}").render(self.ctxt))
self.assertEqual("99999.999", Template("{{ f }}").render(self.ctxt))
self.assertEqual("Des. 31, 2009", Template("{{ d }}").render(self.ctxt))
self.assertEqual(
"Des. 31, 2009, 8:50 p.m.", Template("{{ dt }}").render(self.ctxt)
)
self.assertEqual(
"66666.67", Template('{{ n|floatformat:"2u" }}').render(self.ctxt)
)
self.assertEqual(
"100000.0", Template('{{ f|floatformat:"u" }}').render(self.ctxt)
)
self.assertEqual(
"66666.67",
Template('{{ n|floatformat:"2gu" }}').render(self.ctxt),
)
self.assertEqual(
"100000.0",
Template('{{ f|floatformat:"ug" }}').render(self.ctxt),
)
self.assertEqual(
"10:15 a.m.", Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)
)
self.assertEqual(
"12/31/2009",
Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt),
)
self.assertEqual(
"12/31/2009 8:50 p.m.",
Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt),
)
form = I18nForm(
{
"decimal_field": "66666,666",
"float_field": "99999,999",
"date_field": "31/12/2009",
"datetime_field": "31/12/2009 20:50",
"time_field": "20:50",
"integer_field": "1.234",
}
)
self.assertFalse(form.is_valid())
self.assertEqual(["Introdu\xefu un n\xfamero."], form.errors["float_field"])
self.assertEqual(
["Introdu\xefu un n\xfamero."], form.errors["decimal_field"]
)
self.assertEqual(
["Introdu\xefu una data v\xe0lida."], form.errors["date_field"]
)
self.assertEqual(
["Introdu\xefu una data/hora v\xe0lides."],
form.errors["datetime_field"],
)
self.assertEqual(
["Introdu\xefu un n\xfamero enter."], form.errors["integer_field"]
)
form2 = SelectDateForm(
{
"date_field_month": "12",
"date_field_day": "31",
"date_field_year": "2009",
}
)
self.assertTrue(form2.is_valid())
self.assertEqual(
datetime.date(2009, 12, 31), form2.cleaned_data["date_field"]
)
self.assertHTMLEqual(
'<select name="mydate_month" id="id_mydate_month">'
'<option value="">---</option>'
'<option value="1">gener</option>'
'<option value="2">febrer</option>'
'<option value="3">mar\xe7</option>'
'<option value="4">abril</option>'
'<option value="5">maig</option>'
'<option value="6">juny</option>'
'<option value="7">juliol</option>'
'<option value="8">agost</option>'
'<option value="9">setembre</option>'
'<option value="10">octubre</option>'
'<option value="11">novembre</option>'
'<option value="12" selected>desembre</option>'
"</select>"
'<select name="mydate_day" id="id_mydate_day">'
'<option value="">---</option>'
'<option value="1">1</option>'
'<option value="2">2</option>'
'<option value="3">3</option>'
'<option value="4">4</option>'
'<option value="5">5</option>'
'<option value="6">6</option>'
'<option value="7">7</option>'
'<option value="8">8</option>'
'<option value="9">9</option>'
'<option value="10">10</option>'
'<option value="11">11</option>'
'<option value="12">12</option>'
'<option value="13">13</option>'
'<option value="14">14</option>'
'<option value="15">15</option>'
'<option value="16">16</option>'
'<option value="17">17</option>'
'<option value="18">18</option>'
'<option value="19">19</option>'
'<option value="20">20</option>'
'<option value="21">21</option>'
'<option value="22">22</option>'
'<option value="23">23</option>'
'<option value="24">24</option>'
'<option value="25">25</option>'
'<option value="26">26</option>'
'<option value="27">27</option>'
'<option value="28">28</option>'
'<option value="29">29</option>'
'<option value="30">30</option>'
'<option value="31" selected>31</option>'
"</select>"
'<select name="mydate_year" id="id_mydate_year">'
'<option value="">---</option>'
'<option value="2009" selected>2009</option>'
'<option value="2010">2010</option>'
'<option value="2011">2011</option>'
'<option value="2012">2012</option>'
'<option value="2013">2013</option>'
'<option value="2014">2014</option>'
'<option value="2015">2015</option>'
'<option value="2016">2016</option>'
'<option value="2017">2017</option>'
'<option value="2018">2018</option>'
"</select>",
forms.SelectDateWidget(years=range(2009, 2019)).render(
"mydate", datetime.date(2009, 12, 31)
),
)
# We shouldn't change the behavior of the floatformat filter re:
# thousand separator and grouping when localization is disabled
# even if the USE_THOUSAND_SEPARATOR, NUMBER_GROUPING and
# THOUSAND_SEPARATOR settings are specified.
with self.settings(
USE_THOUSAND_SEPARATOR=True, NUMBER_GROUPING=1, THOUSAND_SEPARATOR="!"
):
self.assertEqual(
"66666.67", Template('{{ n|floatformat:"2u" }}').render(self.ctxt)
)
self.assertEqual(
"100000.0", Template('{{ f|floatformat:"u" }}').render(self.ctxt)
)
def test_false_like_locale_formats(self):
"""
The active locale's formats take precedence over the default settings
even if they would be interpreted as False in a conditional test
(e.g. 0 or empty string) (#16938).
"""
with translation.override("fr"):
with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR="!"):
self.assertEqual("\xa0", get_format("THOUSAND_SEPARATOR"))
# Even a second time (after the format has been cached)...
self.assertEqual("\xa0", get_format("THOUSAND_SEPARATOR"))
with self.settings(FIRST_DAY_OF_WEEK=0):
self.assertEqual(1, get_format("FIRST_DAY_OF_WEEK"))
# Even a second time (after the format has been cached)...
self.assertEqual(1, get_format("FIRST_DAY_OF_WEEK"))
def test_l10n_enabled(self):
self.maxDiff = 3000
# Catalan locale
with translation.override("ca", deactivate=True):
self.assertEqual(r"j E \d\e Y", get_format("DATE_FORMAT"))
self.assertEqual(1, get_format("FIRST_DAY_OF_WEEK"))
self.assertEqual(",", get_format("DECIMAL_SEPARATOR"))
self.assertEqual("10:15", time_format(self.t))
self.assertEqual("31 desembre de 2009", date_format(self.d))
self.assertEqual("1 abril de 2009", date_format(datetime.date(2009, 4, 1)))
self.assertEqual(
"desembre del 2009", date_format(self.d, "YEAR_MONTH_FORMAT")
)
self.assertEqual(
"31/12/2009 20:50", date_format(self.dt, "SHORT_DATETIME_FORMAT")
)
self.assertEqual("No localizable", localize("No localizable"))
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual("66.666,666", localize(self.n))
self.assertEqual("99.999,999", localize(self.f))
self.assertEqual("10.000", localize(self.long))
self.assertEqual("True", localize(True))
with self.settings(USE_THOUSAND_SEPARATOR=False):
self.assertEqual("66666,666", localize(self.n))
self.assertEqual("99999,999", localize(self.f))
self.assertEqual("10000", localize(self.long))
self.assertEqual("31 desembre de 2009", localize(self.d))
self.assertEqual("31 desembre de 2009 a les 20:50", localize(self.dt))
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual("66.666,666", Template("{{ n }}").render(self.ctxt))
self.assertEqual("99.999,999", Template("{{ f }}").render(self.ctxt))
self.assertEqual("10.000", Template("{{ l }}").render(self.ctxt))
with self.settings(USE_THOUSAND_SEPARATOR=True):
form3 = I18nForm(
{
"decimal_field": "66.666,666",
"float_field": "99.999,999",
"date_field": "31/12/2009",
"datetime_field": "31/12/2009 20:50",
"time_field": "20:50",
"integer_field": "1.234",
}
)
self.assertTrue(form3.is_valid())
self.assertEqual(
decimal.Decimal("66666.666"), form3.cleaned_data["decimal_field"]
)
self.assertEqual(99999.999, form3.cleaned_data["float_field"])
self.assertEqual(
datetime.date(2009, 12, 31), form3.cleaned_data["date_field"]
)
self.assertEqual(
datetime.datetime(2009, 12, 31, 20, 50),
form3.cleaned_data["datetime_field"],
)
self.assertEqual(
datetime.time(20, 50), form3.cleaned_data["time_field"]
)
self.assertEqual(1234, form3.cleaned_data["integer_field"])
with self.settings(USE_THOUSAND_SEPARATOR=False):
self.assertEqual("66666,666", Template("{{ n }}").render(self.ctxt))
self.assertEqual("99999,999", Template("{{ f }}").render(self.ctxt))
self.assertEqual(
"31 desembre de 2009", Template("{{ d }}").render(self.ctxt)
)
self.assertEqual(
"31 desembre de 2009 a les 20:50",
Template("{{ dt }}").render(self.ctxt),
)
self.assertEqual(
"66666,67", Template("{{ n|floatformat:2 }}").render(self.ctxt)
)
self.assertEqual(
"100000,0", Template("{{ f|floatformat }}").render(self.ctxt)
)
self.assertEqual(
"66.666,67",
Template('{{ n|floatformat:"2g" }}').render(self.ctxt),
)
self.assertEqual(
"100.000,0",
Template('{{ f|floatformat:"g" }}').render(self.ctxt),
)
self.assertEqual(
"10:15", Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)
)
self.assertEqual(
"31/12/2009",
Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt),
)
self.assertEqual(
"31/12/2009 20:50",
Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt),
)
self.assertEqual(
date_format(datetime.datetime.now()),
Template('{% now "DATE_FORMAT" %}').render(self.ctxt),
)
with self.settings(USE_THOUSAND_SEPARATOR=False):
form4 = I18nForm(
{
"decimal_field": "66666,666",
"float_field": "99999,999",
"date_field": "31/12/2009",
"datetime_field": "31/12/2009 20:50",
"time_field": "20:50",
"integer_field": "1234",
}
)
self.assertTrue(form4.is_valid())
self.assertEqual(
decimal.Decimal("66666.666"), form4.cleaned_data["decimal_field"]
)
self.assertEqual(99999.999, form4.cleaned_data["float_field"])
self.assertEqual(
datetime.date(2009, 12, 31), form4.cleaned_data["date_field"]
)
self.assertEqual(
datetime.datetime(2009, 12, 31, 20, 50),
form4.cleaned_data["datetime_field"],
)
self.assertEqual(
datetime.time(20, 50), form4.cleaned_data["time_field"]
)
self.assertEqual(1234, form4.cleaned_data["integer_field"])
form5 = SelectDateForm(
{
"date_field_month": "12",
"date_field_day": "31",
"date_field_year": "2009",
}
)
self.assertTrue(form5.is_valid())
self.assertEqual(
datetime.date(2009, 12, 31), form5.cleaned_data["date_field"]
)
self.assertHTMLEqual(
'<select name="mydate_day" id="id_mydate_day">'
'<option value="">---</option>'
'<option value="1">1</option>'
'<option value="2">2</option>'
'<option value="3">3</option>'
'<option value="4">4</option>'
'<option value="5">5</option>'
'<option value="6">6</option>'
'<option value="7">7</option>'
'<option value="8">8</option>'
'<option value="9">9</option>'
'<option value="10">10</option>'
'<option value="11">11</option>'
'<option value="12">12</option>'
'<option value="13">13</option>'
'<option value="14">14</option>'
'<option value="15">15</option>'
'<option value="16">16</option>'
'<option value="17">17</option>'
'<option value="18">18</option>'
'<option value="19">19</option>'
'<option value="20">20</option>'
'<option value="21">21</option>'
'<option value="22">22</option>'
'<option value="23">23</option>'
'<option value="24">24</option>'
'<option value="25">25</option>'
'<option value="26">26</option>'
'<option value="27">27</option>'
'<option value="28">28</option>'
'<option value="29">29</option>'
'<option value="30">30</option>'
'<option value="31" selected>31</option>'
"</select>"
'<select name="mydate_month" id="id_mydate_month">'
'<option value="">---</option>'
'<option value="1">gener</option>'
'<option value="2">febrer</option>'
'<option value="3">mar\xe7</option>'
'<option value="4">abril</option>'
'<option value="5">maig</option>'
'<option value="6">juny</option>'
'<option value="7">juliol</option>'
'<option value="8">agost</option>'
'<option value="9">setembre</option>'
'<option value="10">octubre</option>'
'<option value="11">novembre</option>'
'<option value="12" selected>desembre</option>'
"</select>"
'<select name="mydate_year" id="id_mydate_year">'
'<option value="">---</option>'
'<option value="2009" selected>2009</option>'
'<option value="2010">2010</option>'
'<option value="2011">2011</option>'
'<option value="2012">2012</option>'
'<option value="2013">2013</option>'
'<option value="2014">2014</option>'
'<option value="2015">2015</option>'
'<option value="2016">2016</option>'
'<option value="2017">2017</option>'
'<option value="2018">2018</option>'
"</select>",
forms.SelectDateWidget(years=range(2009, 2019)).render(
"mydate", datetime.date(2009, 12, 31)
),
)
        # Russian locale (with 'E' as the month format character)
with translation.override("ru", deactivate=True):
self.assertHTMLEqual(
'<select name="mydate_day" id="id_mydate_day">'
'<option value="">---</option>'
'<option value="1">1</option>'
'<option value="2">2</option>'
'<option value="3">3</option>'
'<option value="4">4</option>'
'<option value="5">5</option>'
'<option value="6">6</option>'
'<option value="7">7</option>'
'<option value="8">8</option>'
'<option value="9">9</option>'
'<option value="10">10</option>'
'<option value="11">11</option>'
'<option value="12">12</option>'
'<option value="13">13</option>'
'<option value="14">14</option>'
'<option value="15">15</option>'
'<option value="16">16</option>'
'<option value="17">17</option>'
'<option value="18">18</option>'
'<option value="19">19</option>'
'<option value="20">20</option>'
'<option value="21">21</option>'
'<option value="22">22</option>'
'<option value="23">23</option>'
'<option value="24">24</option>'
'<option value="25">25</option>'
'<option value="26">26</option>'
'<option value="27">27</option>'
'<option value="28">28</option>'
'<option value="29">29</option>'
'<option value="30">30</option>'
'<option value="31" selected>31</option>'
"</select>"
'<select name="mydate_month" id="id_mydate_month">'
'<option value="">---</option>'
'<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>'
'<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>'
'<option value="3">\u041c\u0430\u0440\u0442</option>'
'<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>'
'<option value="5">\u041c\u0430\u0439</option>'
'<option value="6">\u0418\u044e\u043d\u044c</option>'
'<option value="7">\u0418\u044e\u043b\u044c</option>'
'<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>'
'<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c'
"</option>"
'<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>'
'<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>'
'<option value="12" selected>\u0414\u0435\u043a\u0430\u0431\u0440\u044c'
"</option>"
"</select>"
'<select name="mydate_year" id="id_mydate_year">'
'<option value="">---</option>'
'<option value="2009" selected>2009</option>'
'<option value="2010">2010</option>'
'<option value="2011">2011</option>'
'<option value="2012">2012</option>'
'<option value="2013">2013</option>'
'<option value="2014">2014</option>'
'<option value="2015">2015</option>'
'<option value="2016">2016</option>'
'<option value="2017">2017</option>'
'<option value="2018">2018</option>'
"</select>",
forms.SelectDateWidget(years=range(2009, 2019)).render(
"mydate", datetime.date(2009, 12, 31)
),
)
# English locale
with translation.override("en", deactivate=True):
self.assertEqual("N j, Y", get_format("DATE_FORMAT"))
self.assertEqual(0, get_format("FIRST_DAY_OF_WEEK"))
self.assertEqual(".", get_format("DECIMAL_SEPARATOR"))
self.assertEqual("Dec. 31, 2009", date_format(self.d))
self.assertEqual("December 2009", date_format(self.d, "YEAR_MONTH_FORMAT"))
self.assertEqual(
"12/31/2009 8:50 p.m.", date_format(self.dt, "SHORT_DATETIME_FORMAT")
)
self.assertEqual("No localizable", localize("No localizable"))
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual("66,666.666", localize(self.n))
self.assertEqual("99,999.999", localize(self.f))
self.assertEqual("10,000", localize(self.long))
with self.settings(USE_THOUSAND_SEPARATOR=False):
self.assertEqual("66666.666", localize(self.n))
self.assertEqual("99999.999", localize(self.f))
self.assertEqual("10000", localize(self.long))
self.assertEqual("Dec. 31, 2009", localize(self.d))
self.assertEqual("Dec. 31, 2009, 8:50 p.m.", localize(self.dt))
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual("66,666.666", Template("{{ n }}").render(self.ctxt))
self.assertEqual("99,999.999", Template("{{ f }}").render(self.ctxt))
self.assertEqual("10,000", Template("{{ l }}").render(self.ctxt))
with self.settings(USE_THOUSAND_SEPARATOR=False):
self.assertEqual("66666.666", Template("{{ n }}").render(self.ctxt))
self.assertEqual("99999.999", Template("{{ f }}").render(self.ctxt))
self.assertEqual("Dec. 31, 2009", Template("{{ d }}").render(self.ctxt))
self.assertEqual(
"Dec. 31, 2009, 8:50 p.m.", Template("{{ dt }}").render(self.ctxt)
)
self.assertEqual(
"66666.67", Template("{{ n|floatformat:2 }}").render(self.ctxt)
)
self.assertEqual(
"100000.0", Template("{{ f|floatformat }}").render(self.ctxt)
)
self.assertEqual(
"66,666.67",
Template('{{ n|floatformat:"2g" }}').render(self.ctxt),
)
self.assertEqual(
"100,000.0",
Template('{{ f|floatformat:"g" }}').render(self.ctxt),
)
self.assertEqual(
"12/31/2009",
Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt),
)
self.assertEqual(
"12/31/2009 8:50 p.m.",
Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt),
)
form5 = I18nForm(
{
"decimal_field": "66666.666",
"float_field": "99999.999",
"date_field": "12/31/2009",
"datetime_field": "12/31/2009 20:50",
"time_field": "20:50",
"integer_field": "1234",
}
)
self.assertTrue(form5.is_valid())
self.assertEqual(
decimal.Decimal("66666.666"), form5.cleaned_data["decimal_field"]
)
self.assertEqual(99999.999, form5.cleaned_data["float_field"])
self.assertEqual(
datetime.date(2009, 12, 31), form5.cleaned_data["date_field"]
)
self.assertEqual(
datetime.datetime(2009, 12, 31, 20, 50),
form5.cleaned_data["datetime_field"],
)
self.assertEqual(datetime.time(20, 50), form5.cleaned_data["time_field"])
self.assertEqual(1234, form5.cleaned_data["integer_field"])
form6 = SelectDateForm(
{
"date_field_month": "12",
"date_field_day": "31",
"date_field_year": "2009",
}
)
self.assertTrue(form6.is_valid())
self.assertEqual(
datetime.date(2009, 12, 31), form6.cleaned_data["date_field"]
)
self.assertHTMLEqual(
'<select name="mydate_month" id="id_mydate_month">'
'<option value="">---</option>'
'<option value="1">January</option>'
'<option value="2">February</option>'
'<option value="3">March</option>'
'<option value="4">April</option>'
'<option value="5">May</option>'
'<option value="6">June</option>'
'<option value="7">July</option>'
'<option value="8">August</option>'
'<option value="9">September</option>'
'<option value="10">October</option>'
'<option value="11">November</option>'
'<option value="12" selected>December</option>'
"</select>"
'<select name="mydate_day" id="id_mydate_day">'
'<option value="">---</option>'
'<option value="1">1</option>'
'<option value="2">2</option>'
'<option value="3">3</option>'
'<option value="4">4</option>'
'<option value="5">5</option>'
'<option value="6">6</option>'
'<option value="7">7</option>'
'<option value="8">8</option>'
'<option value="9">9</option>'
'<option value="10">10</option>'
'<option value="11">11</option>'
'<option value="12">12</option>'
'<option value="13">13</option>'
'<option value="14">14</option>'
'<option value="15">15</option>'
'<option value="16">16</option>'
'<option value="17">17</option>'
'<option value="18">18</option>'
'<option value="19">19</option>'
'<option value="20">20</option>'
'<option value="21">21</option>'
'<option value="22">22</option>'
'<option value="23">23</option>'
'<option value="24">24</option>'
'<option value="25">25</option>'
'<option value="26">26</option>'
'<option value="27">27</option>'
'<option value="28">28</option>'
'<option value="29">29</option>'
'<option value="30">30</option>'
'<option value="31" selected>31</option>'
"</select>"
'<select name="mydate_year" id="id_mydate_year">'
'<option value="">---</option>'
'<option value="2009" selected>2009</option>'
'<option value="2010">2010</option>'
'<option value="2011">2011</option>'
'<option value="2012">2012</option>'
'<option value="2013">2013</option>'
'<option value="2014">2014</option>'
'<option value="2015">2015</option>'
'<option value="2016">2016</option>'
'<option value="2017">2017</option>'
'<option value="2018">2018</option>'
"</select>",
forms.SelectDateWidget(years=range(2009, 2019)).render(
"mydate", datetime.date(2009, 12, 31)
),
)
def test_sub_locales(self):
"""
        Check that sublocales fall back to the main locale.
"""
with self.settings(USE_THOUSAND_SEPARATOR=True):
with translation.override("de-at", deactivate=True):
self.assertEqual("66.666,666", Template("{{ n }}").render(self.ctxt))
with translation.override("es-us", deactivate=True):
self.assertEqual("31 de diciembre de 2009", date_format(self.d))
def test_localized_input(self):
"""
        Tests that form input is correctly localized.
"""
self.maxDiff = 1200
with translation.override("de-at", deactivate=True):
form6 = CompanyForm(
{
"name": "acme",
"date_added": datetime.datetime(2009, 12, 31, 6, 0, 0),
"cents_paid": decimal.Decimal("59.47"),
"products_delivered": 12000,
}
)
self.assertTrue(form6.is_valid())
self.assertHTMLEqual(
form6.as_ul(),
'<li><label for="id_name">Name:</label>'
'<input id="id_name" type="text" name="name" value="acme" '
' maxlength="50" required></li>'
'<li><label for="id_date_added">Date added:</label>'
'<input type="text" name="date_added" value="31.12.2009 06:00:00" '
' id="id_date_added" required></li>'
'<li><label for="id_cents_paid">Cents paid:</label>'
'<input type="text" name="cents_paid" value="59,47" id="id_cents_paid" '
" required></li>"
'<li><label for="id_products_delivered">Products delivered:</label>'
'<input type="text" name="products_delivered" value="12000" '
' id="id_products_delivered" required>'
"</li>",
)
self.assertEqual(
localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)),
"31.12.2009 06:00:00",
)
self.assertEqual(
datetime.datetime(2009, 12, 31, 6, 0, 0),
form6.cleaned_data["date_added"],
)
with self.settings(USE_THOUSAND_SEPARATOR=True):
# Checking for the localized "products_delivered" field
self.assertInHTML(
'<input type="text" name="products_delivered" '
'value="12.000" id="id_products_delivered" required>',
form6.as_ul(),
)
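    # A minimal illustrative sketch (hypothetical test name, mirroring the
    # assertions above): bare numbers passed to localize_input() pick up the
    # active locale's separators when USE_THOUSAND_SEPARATOR is enabled.
    def test_localized_input_number_sketch(self):
        with translation.override("de-at", deactivate=True):
            with self.settings(USE_THOUSAND_SEPARATOR=True):
                self.assertEqual(
                    localize_input(decimal.Decimal("1234.5")), "1.234,5"
                )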
def test_localized_input_func(self):
tests = (
(True, "True"),
(datetime.date(1, 1, 1), "0001-01-01"),
(datetime.datetime(1, 1, 1), "0001-01-01 00:00:00"),
)
with self.settings(USE_THOUSAND_SEPARATOR=True):
for value, expected in tests:
with self.subTest(value=value):
self.assertEqual(localize_input(value), expected)
def test_sanitize_strftime_format(self):
for year in (1, 99, 999, 1000):
dt = datetime.date(year, 1, 1)
for fmt, expected in [
("%C", "%02d" % (year // 100)),
("%F", "%04d-01-01" % year),
("%G", "%04d" % year),
("%Y", "%04d" % year),
]:
with self.subTest(year=year, fmt=fmt):
fmt = sanitize_strftime_format(fmt)
self.assertEqual(dt.strftime(fmt), expected)
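    # A minimal cross-platform sketch (hypothetical test name): whether or not
    # the C library zero-pads years below 1000, the sanitized format produces
    # a four-digit year, consistent with the table above.
    def test_sanitize_strftime_format_sketch(self):
        fmt = sanitize_strftime_format("%Y")
        self.assertEqual(datetime.date(999, 1, 1).strftime(fmt), "0999")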
def test_sanitize_strftime_format_with_escaped_percent(self):
dt = datetime.date(1, 1, 1)
for fmt, expected in [
("%%C", "%C"),
("%%F", "%F"),
("%%G", "%G"),
("%%Y", "%Y"),
("%%%%C", "%%C"),
("%%%%F", "%%F"),
("%%%%G", "%%G"),
("%%%%Y", "%%Y"),
]:
with self.subTest(fmt=fmt):
fmt = sanitize_strftime_format(fmt)
self.assertEqual(dt.strftime(fmt), expected)
for year in (1, 99, 999, 1000):
dt = datetime.date(year, 1, 1)
for fmt, expected in [
("%%%C", "%%%02d" % (year // 100)),
("%%%F", "%%%04d-01-01" % year),
("%%%G", "%%%04d" % year),
("%%%Y", "%%%04d" % year),
("%%%%%C", "%%%%%02d" % (year // 100)),
("%%%%%F", "%%%%%04d-01-01" % year),
("%%%%%G", "%%%%%04d" % year),
("%%%%%Y", "%%%%%04d" % year),
]:
with self.subTest(year=year, fmt=fmt):
fmt = sanitize_strftime_format(fmt)
self.assertEqual(dt.strftime(fmt), expected)
def test_sanitize_separators(self):
"""
Tests django.utils.formats.sanitize_separators.
"""
# Non-strings are untouched
self.assertEqual(sanitize_separators(123), 123)
with translation.override("ru", deactivate=True):
            # The Russian locale uses a non-breaking space (\xa0) as its
            # thousand separator; a regular space is accepted too when
            # sanitizing inputs.
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual(sanitize_separators("1\xa0234\xa0567"), "1234567")
self.assertEqual(sanitize_separators("77\xa0777,777"), "77777.777")
self.assertEqual(sanitize_separators("12 345"), "12345")
self.assertEqual(sanitize_separators("77 777,777"), "77777.777")
with translation.override(None): # RemovedInDjango50Warning
with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR="."):
self.assertEqual(sanitize_separators("12\xa0345"), "12\xa0345")
with self.settings(USE_THOUSAND_SEPARATOR=True):
with patch_formats(
get_language(), THOUSAND_SEPARATOR=".", DECIMAL_SEPARATOR=","
):
self.assertEqual(sanitize_separators("10.234"), "10234")
                # Suspicion that the user entered a dot as the decimal
                # separator (#22171).
self.assertEqual(sanitize_separators("10.10"), "10.10")
# RemovedInDjango50Warning: When the deprecation ends, remove
# @ignore_warnings and USE_L10N=False. The assertions should remain
# because format-related settings will take precedence over
# locale-dictated formats.
with ignore_warnings(category=RemovedInDjango50Warning):
with self.settings(USE_L10N=False):
with self.settings(DECIMAL_SEPARATOR=","):
self.assertEqual(sanitize_separators("1001,10"), "1001.10")
self.assertEqual(sanitize_separators("1001.10"), "1001.10")
with self.settings(
DECIMAL_SEPARATOR=",",
THOUSAND_SEPARATOR=".",
USE_THOUSAND_SEPARATOR=True,
):
self.assertEqual(sanitize_separators("1.001,10"), "1001.10")
self.assertEqual(sanitize_separators("1001,10"), "1001.10")
self.assertEqual(sanitize_separators("1001.10"), "1001.10")
                    # Mismatched separators (English-style input) aren't
                    # sanitized; the output stays invalid.
self.assertEqual(sanitize_separators("1,001.10"), "1.001.10")
def test_iter_format_modules(self):
"""
Tests the iter_format_modules function.
"""
        # Import some format modules to compare them against the modules
        # returned by iter_format_modules().
default_mod = import_module("django.conf.locale.de.formats")
test_mod = import_module("i18n.other.locale.de.formats")
test_mod2 = import_module("i18n.other2.locale.de.formats")
with translation.override("de-at", deactivate=True):
# Should return the correct default module when no setting is set
self.assertEqual(list(iter_format_modules("de")), [default_mod])
# When the setting is a string, should return the given module and
# the default module
self.assertEqual(
list(iter_format_modules("de", "i18n.other.locale")),
[test_mod, default_mod],
)
# When setting is a list of strings, should return the given
# modules and the default module
self.assertEqual(
list(
iter_format_modules(
"de", ["i18n.other.locale", "i18n.other2.locale"]
)
),
[test_mod, test_mod2, default_mod],
)
def test_iter_format_modules_stability(self):
"""
        Tests that iter_format_modules() always yields format modules in a
        stable and correct order in the presence of both base (ll) and
        territorial (ll_CC) formats.
"""
en_format_mod = import_module("django.conf.locale.en.formats")
en_gb_format_mod = import_module("django.conf.locale.en_GB.formats")
self.assertEqual(
list(iter_format_modules("en-gb")), [en_gb_format_mod, en_format_mod]
)
def test_get_format_modules_lang(self):
with translation.override("de", deactivate=True):
self.assertEqual(".", get_format("DECIMAL_SEPARATOR", lang="en"))
def test_get_format_lazy_format(self):
self.assertEqual(get_format(gettext_lazy("DATE_FORMAT")), "N j, Y")
def test_localize_templatetag_and_filter(self):
"""
Test the {% localize %} templatetag and the localize/unlocalize filters.
"""
context = Context(
{"int": 1455, "float": 3.14, "date": datetime.date(2016, 12, 31)}
)
template1 = Template(
"{% load l10n %}{% localize %}"
"{{ int }}/{{ float }}/{{ date }}{% endlocalize %}; "
"{% localize on %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}"
)
template2 = Template(
"{% load l10n %}{{ int }}/{{ float }}/{{ date }}; "
"{% localize off %}{{ int }}/{{ float }}/{{ date }};{% endlocalize %} "
"{{ int }}/{{ float }}/{{ date }}"
)
template3 = Template(
"{% load l10n %}{{ int }}/{{ float }}/{{ date }}; "
"{{ int|unlocalize }}/{{ float|unlocalize }}/{{ date|unlocalize }}"
)
template4 = Template(
"{% load l10n %}{{ int }}/{{ float }}/{{ date }}; "
"{{ int|localize }}/{{ float|localize }}/{{ date|localize }}"
)
expected_localized = "1.455/3,14/31. Dezember 2016"
expected_unlocalized = "1455/3.14/Dez. 31, 2016"
output1 = "; ".join([expected_localized, expected_localized])
output2 = "; ".join(
[expected_localized, expected_unlocalized, expected_localized]
)
output3 = "; ".join([expected_localized, expected_unlocalized])
output4 = "; ".join([expected_unlocalized, expected_localized])
with translation.override("de", deactivate=True):
# RemovedInDjango50Warning: When the deprecation ends, remove
# @ignore_warnings and USE_L10N=False. The assertions should remain
# because format-related settings will take precedence over
# locale-dictated formats.
with ignore_warnings(category=RemovedInDjango50Warning):
with self.settings(
USE_L10N=False,
DATE_FORMAT="N j, Y",
DECIMAL_SEPARATOR=".",
NUMBER_GROUPING=0,
USE_THOUSAND_SEPARATOR=True,
):
self.assertEqual(template1.render(context), output1)
self.assertEqual(template4.render(context), output4)
with self.settings(USE_THOUSAND_SEPARATOR=True):
self.assertEqual(template1.render(context), output1)
self.assertEqual(template2.render(context), output2)
self.assertEqual(template3.render(context), output3)
def test_localized_off_numbers(self):
"""A string representation is returned for unlocalized numbers."""
template = Template(
"{% load l10n %}{% localize off %}"
"{{ int }}/{{ float }}/{{ decimal }}{% endlocalize %}"
)
context = Context(
{"int": 1455, "float": 3.14, "decimal": decimal.Decimal("24.1567")}
)
with self.settings(
DECIMAL_SEPARATOR=",",
USE_THOUSAND_SEPARATOR=True,
THOUSAND_SEPARATOR="°",
NUMBER_GROUPING=2,
):
self.assertEqual(template.render(context), "1455/3.14/24.1567")
# RemovedInDjango50Warning.
with ignore_warnings(category=RemovedInDjango50Warning):
with self.settings(
USE_L10N=False,
DECIMAL_SEPARATOR=",",
USE_THOUSAND_SEPARATOR=True,
THOUSAND_SEPARATOR="°",
NUMBER_GROUPING=2,
):
self.assertEqual(template.render(context), "1455/3.14/24.1567")
def test_localized_as_text_as_hidden_input(self):
"""
Form input with 'as_hidden' or 'as_text' is correctly localized.
"""
self.maxDiff = 1200
with translation.override("de-at", deactivate=True):
template = Template(
"{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}"
)
template_as_text = Template(
"{% load l10n %}"
"{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}"
)
template_as_hidden = Template(
"{% load l10n %}"
"{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}"
)
form = CompanyForm(
{
"name": "acme",
"date_added": datetime.datetime(2009, 12, 31, 6, 0, 0),
"cents_paid": decimal.Decimal("59.47"),
"products_delivered": 12000,
}
)
context = Context({"form": form})
self.assertTrue(form.is_valid())
self.assertHTMLEqual(
template.render(context),
'<input id="id_date_added" name="date_added" type="text" '
'value="31.12.2009 06:00:00" required>;'
'<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" '
"required>",
)
self.assertHTMLEqual(
template_as_text.render(context),
'<input id="id_date_added" name="date_added" type="text" '
'value="31.12.2009 06:00:00" required>;'
'<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" '
"required>",
)
self.assertHTMLEqual(
template_as_hidden.render(context),
'<input id="id_date_added" name="date_added" type="hidden" '
'value="31.12.2009 06:00:00">;'
'<input id="id_cents_paid" name="cents_paid" type="hidden" '
'value="59,47">',
)
def test_format_arbitrary_settings(self):
self.assertEqual(get_format("DEBUG"), "DEBUG")
def test_get_custom_format(self):
reset_format_cache()
with self.settings(FORMAT_MODULE_PATH="i18n.other.locale"):
with translation.override("fr", deactivate=True):
self.assertEqual("d/m/Y CUSTOM", get_format("CUSTOM_DAY_FORMAT"))
def test_admin_javascript_supported_input_formats(self):
"""
The first input format for DATE_INPUT_FORMATS, TIME_INPUT_FORMATS, and
DATETIME_INPUT_FORMATS must not contain %f since that's unsupported by
the admin's time picker widget.
"""
regex = re.compile("%([^BcdHImMpSwxXyY%])")
for language_code, language_name in settings.LANGUAGES:
for format_name in (
"DATE_INPUT_FORMATS",
"TIME_INPUT_FORMATS",
"DATETIME_INPUT_FORMATS",
):
with self.subTest(language=language_code, format=format_name):
formatter = get_format(format_name, lang=language_code)[0]
self.assertEqual(
regex.findall(formatter),
[],
"%s locale's %s uses an unsupported format code."
% (language_code, format_name),
)
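    # A minimal illustrative sketch (hypothetical test name) of what the regex
    # above flags: "%S" is in the supported set, while "%f" (microseconds) is
    # not.
    def test_admin_javascript_format_regex_sketch(self):
        regex = re.compile("%([^BcdHImMpSwxXyY%])")
        self.assertEqual(regex.findall("%d/%m/%Y %H:%M:%S"), [])
        self.assertEqual(regex.findall("%H:%M:%S.%f"), ["f"])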
class MiscTests(SimpleTestCase):
rf = RequestFactory()
@override_settings(LANGUAGE_CODE="de")
def test_english_fallback(self):
"""
        With a non-English LANGUAGE_CODE, when the active language is English
        or one of its variants, the untranslated string should be returned
        instead of falling back to LANGUAGE_CODE (#24413).
"""
self.assertEqual(gettext("Image"), "Bild")
with translation.override("en"):
self.assertEqual(gettext("Image"), "Image")
with translation.override("en-us"):
self.assertEqual(gettext("Image"), "Image")
with translation.override("en-ca"):
self.assertEqual(gettext("Image"), "Image")
def test_parse_spec_http_header(self):
"""
Testing HTTP header parsing. First, we test that we can parse the
values according to the spec (and that we extract all the pieces in
the right order).
"""
tests = [
# Good headers
("de", [("de", 1.0)]),
("en-AU", [("en-au", 1.0)]),
("es-419", [("es-419", 1.0)]),
("*;q=1.00", [("*", 1.0)]),
("en-AU;q=0.123", [("en-au", 0.123)]),
("en-au;q=0.5", [("en-au", 0.5)]),
("en-au;q=1.0", [("en-au", 1.0)]),
("da, en-gb;q=0.25, en;q=0.5", [("da", 1.0), ("en", 0.5), ("en-gb", 0.25)]),
("en-au-xx", [("en-au-xx", 1.0)]),
(
"de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125",
[
("de", 1.0),
("en-au", 0.75),
("en-us", 0.5),
("en", 0.25),
("es", 0.125),
("fa", 0.125),
],
),
("*", [("*", 1.0)]),
("de;q=0.", [("de", 0.0)]),
("en; q=1,", [("en", 1.0)]),
("en; q=1.0, * ; q=0.5", [("en", 1.0), ("*", 0.5)]),
# Bad headers
("en-gb;q=1.0000", []),
("en;q=0.1234", []),
("en;q=.2", []),
("abcdefghi-au", []),
("**", []),
("en,,gb", []),
("en-au;q=0.1.0", []),
(("X" * 97) + "Z,en", []),
("da, en-gb;q=0.8, en;q=0.7,#", []),
("de;q=2.0", []),
("de;q=0.a", []),
("12-345", []),
("", []),
("en;q=1e0", []),
("en-au;q=1.0", []),
]
for value, expected in tests:
with self.subTest(value=value):
self.assertEqual(
trans_real.parse_accept_lang_header(value), tuple(expected)
)
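    # A minimal illustrative sketch (hypothetical test name): parsed entries
    # come back lowercased and ordered by descending quality.
    def test_parse_accept_lang_header_ordering_sketch(self):
        self.assertEqual(
            trans_real.parse_accept_lang_header("en;q=0.5, FR"),
            (("fr", 1.0), ("en", 0.5)),
        )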
def test_parse_literal_http_header(self):
tests = [
("pt-br", "pt-br"),
("pt", "pt"),
("es,de", "es"),
("es-a,de", "es"),
            # There isn't a Django translation for a US variant of Spanish (a
            # safe assumption). When the user sets it as the preferred
            # language, the main 'es' translation should be selected instead.
("es-us", "es"),
            # There isn't a main language (zh) translation of Django, but
            # there is a translation for the variant (zh-hans). When the user
            # sets zh-hans as the preferred language, it should be selected
            # without falling back or being ignored.
("zh-hans,de", "zh-hans"),
("NL", "nl"),
("fy", "fy"),
("ia", "ia"),
("sr-latn", "sr-latn"),
("zh-hans", "zh-hans"),
("zh-hant", "zh-hant"),
]
for header, expected in tests:
with self.subTest(header=header):
request = self.rf.get("/", headers={"accept-language": header})
self.assertEqual(get_language_from_request(request), expected)
@override_settings(
LANGUAGES=[
("en", "English"),
("zh-hans", "Simplified Chinese"),
("zh-hant", "Traditional Chinese"),
]
)
def test_support_for_deprecated_chinese_language_codes(self):
"""
        Some browsers (Firefox, IE, etc.) use deprecated language codes. Since
        these language codes were removed in Django 1.9, they would otherwise
        be matched incorrectly; for example, zh-tw (traditional) would be
        interpreted as zh-hans (simplified), which is wrong. So these
        deprecated language codes should still be accepted.
        Refs #18419 -- this is explicitly for browser compatibility.
"""
g = get_language_from_request
request = self.rf.get("/", headers={"accept-language": "zh-cn,en"})
self.assertEqual(g(request), "zh-hans")
request = self.rf.get("/", headers={"accept-language": "zh-tw,en"})
self.assertEqual(g(request), "zh-hant")
def test_special_fallback_language(self):
"""
Some languages may have special fallbacks that don't follow the simple
'fr-ca' -> 'fr' logic (notably Chinese codes).
"""
request = self.rf.get("/", headers={"accept-language": "zh-my,en"})
self.assertEqual(get_language_from_request(request), "zh-hans")
def test_subsequent_code_fallback_language(self):
"""
Subsequent language codes should be used when the language code is not
supported.
"""
tests = [
("zh-Hans-CN", "zh-hans"),
("zh-hans-mo", "zh-hans"),
("zh-hans-HK", "zh-hans"),
("zh-Hant-HK", "zh-hant"),
("zh-hant-tw", "zh-hant"),
("zh-hant-SG", "zh-hant"),
]
for value, expected in tests:
with self.subTest(value=value):
request = self.rf.get("/", headers={"accept-language": f"{value},en"})
self.assertEqual(get_language_from_request(request), expected)
def test_parse_language_cookie(self):
g = get_language_from_request
request = self.rf.get("/")
request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "pt-br"
self.assertEqual("pt-br", g(request))
request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "pt"
self.assertEqual("pt", g(request))
request = self.rf.get("/", headers={"accept-language": "de"})
request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "es"
self.assertEqual("es", g(request))
        # There isn't a Django translation for a US variant of Spanish (a safe
        # assumption). When the user sets it as the preferred language, the
        # main 'es' translation should be selected instead.
request = self.rf.get("/")
request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "es-us"
self.assertEqual(g(request), "es")
        # There isn't a main language (zh) translation of Django, but there is
        # a translation for the variant (zh-hans). When the user sets zh-hans
        # as the preferred language, it should be selected without falling
        # back or being ignored.
request = self.rf.get("/", headers={"accept-language": "de"})
request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "zh-hans"
self.assertEqual(g(request), "zh-hans")
@override_settings(
USE_I18N=True,
LANGUAGES=[
("en", "English"),
("ar-dz", "Algerian Arabic"),
("de", "German"),
("de-at", "Austrian German"),
("pt-BR", "Portuguese (Brazil)"),
],
)
def test_get_supported_language_variant_real(self):
g = trans_real.get_supported_language_variant
self.assertEqual(g("en"), "en")
self.assertEqual(g("en-gb"), "en")
self.assertEqual(g("de"), "de")
self.assertEqual(g("de-at"), "de-at")
self.assertEqual(g("de-ch"), "de")
self.assertEqual(g("pt-br"), "pt-br")
self.assertEqual(g("pt-BR"), "pt-BR")
self.assertEqual(g("pt"), "pt-br")
self.assertEqual(g("pt-pt"), "pt-br")
self.assertEqual(g("ar-dz"), "ar-dz")
self.assertEqual(g("ar-DZ"), "ar-DZ")
with self.assertRaises(LookupError):
g("pt", strict=True)
with self.assertRaises(LookupError):
g("pt-pt", strict=True)
with self.assertRaises(LookupError):
g("xyz")
with self.assertRaises(LookupError):
g("xy-zz")
def test_get_supported_language_variant_null(self):
g = trans_null.get_supported_language_variant
self.assertEqual(g(settings.LANGUAGE_CODE), settings.LANGUAGE_CODE)
with self.assertRaises(LookupError):
g("pt")
with self.assertRaises(LookupError):
g("de")
with self.assertRaises(LookupError):
g("de-at")
with self.assertRaises(LookupError):
g("de", strict=True)
with self.assertRaises(LookupError):
g("de-at", strict=True)
with self.assertRaises(LookupError):
g("xyz")
@override_settings(
LANGUAGES=[
("en", "English"),
("en-latn-us", "Latin English"),
("de", "German"),
("de-1996", "German, orthography of 1996"),
("de-at", "Austrian German"),
("de-ch-1901", "German, Swiss variant, traditional orthography"),
("i-mingo", "Mingo"),
("kl-tunumiit", "Tunumiisiut"),
("nan-hani-tw", "Hanji"),
("pl", "Polish"),
],
)
def test_get_language_from_path_real(self):
g = trans_real.get_language_from_path
tests = [
("/pl/", "pl"),
("/pl", "pl"),
("/xyz/", None),
("/en/", "en"),
("/en-gb/", "en"),
("/en-latn-us/", "en-latn-us"),
("/en-Latn-US/", "en-Latn-US"),
("/de/", "de"),
("/de-1996/", "de-1996"),
("/de-at/", "de-at"),
("/de-AT/", "de-AT"),
("/de-ch/", "de"),
("/de-ch-1901/", "de-ch-1901"),
("/de-simple-page-test/", None),
("/i-mingo/", "i-mingo"),
("/kl-tunumiit/", "kl-tunumiit"),
("/nan-hani-tw/", "nan-hani-tw"),
]
for path, language in tests:
with self.subTest(path=path):
self.assertEqual(g(path), language)
def test_get_language_from_path_null(self):
g = trans_null.get_language_from_path
self.assertIsNone(g("/pl/"))
self.assertIsNone(g("/pl"))
self.assertIsNone(g("/xyz/"))
def test_cache_resetting(self):
"""
        After changing the LANGUAGES setting, the cache should be cleared and
        previously valid languages should no longer be used (#14170).
"""
g = get_language_from_request
request = self.rf.get("/", headers={"accept-language": "pt-br"})
self.assertEqual("pt-br", g(request))
with self.settings(LANGUAGES=[("en", "English")]):
self.assertNotEqual("pt-br", g(request))
def test_i18n_patterns_returns_list(self):
with override_settings(USE_I18N=False):
self.assertIsInstance(i18n_patterns([]), list)
with override_settings(USE_I18N=True):
self.assertIsInstance(i18n_patterns([]), list)
class ResolutionOrderI18NTests(SimpleTestCase):
def setUp(self):
super().setUp()
activate("de")
def tearDown(self):
deactivate()
super().tearDown()
def assertGettext(self, msgid, msgstr):
result = gettext(msgid)
self.assertIn(
msgstr,
result,
"The string '%s' isn't in the translation of '%s'; the actual result is "
"'%s'." % (msgstr, msgid, result),
)
class AppResolutionOrderI18NTests(ResolutionOrderI18NTests):
@override_settings(LANGUAGE_CODE="de")
def test_app_translation(self):
# Original translation.
self.assertGettext("Date/time", "Datum/Zeit")
# Different translation.
with self.modify_settings(INSTALLED_APPS={"append": "i18n.resolution"}):
# Force refreshing translations.
activate("de")
# Doesn't work because it's added later in the list.
self.assertGettext("Date/time", "Datum/Zeit")
with self.modify_settings(
INSTALLED_APPS={"remove": "django.contrib.admin.apps.SimpleAdminConfig"}
):
# Force refreshing translations.
activate("de")
# Unless the original is removed from the list.
self.assertGettext("Date/time", "Datum/Zeit (APP)")
@override_settings(LOCALE_PATHS=extended_locale_paths)
class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests):
def test_locale_paths_translation(self):
self.assertGettext("Time", "LOCALE_PATHS")
def test_locale_paths_override_app_translation(self):
with self.settings(INSTALLED_APPS=["i18n.resolution"]):
self.assertGettext("Time", "LOCALE_PATHS")
class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests):
def test_django_fallback(self):
self.assertEqual(gettext("Date/time"), "Datum/Zeit")
@override_settings(INSTALLED_APPS=["i18n.territorial_fallback"])
class TranslationFallbackI18NTests(ResolutionOrderI18NTests):
def test_sparse_territory_catalog(self):
"""
Untranslated strings for territorial language variants use the
translations of the generic language. In this case, the de-de
translation falls back to de.
"""
with translation.override("de-de"):
self.assertGettext("Test 1 (en)", "(de-de)")
self.assertGettext("Test 2 (en)", "(de)")
class TestModels(TestCase):
def test_lazy(self):
tm = TestModel()
tm.save()
def test_safestr(self):
c = Company(cents_paid=12, products_delivered=1)
c.name = SafeString("Iñtërnâtiônàlizætiøn1")
c.save()
class TestLanguageInfo(SimpleTestCase):
def test_localized_language_info(self):
li = get_language_info("de")
self.assertEqual(li["code"], "de")
self.assertEqual(li["name_local"], "Deutsch")
self.assertEqual(li["name"], "German")
self.assertIs(li["bidi"], False)
def test_unknown_language_code(self):
with self.assertRaisesMessage(KeyError, "Unknown language code xx"):
get_language_info("xx")
with translation.override("xx"):
# A language with no translation catalogs should fallback to the
# untranslated string.
self.assertEqual(gettext("Title"), "Title")
def test_unknown_only_country_code(self):
li = get_language_info("de-xx")
self.assertEqual(li["code"], "de")
self.assertEqual(li["name_local"], "Deutsch")
self.assertEqual(li["name"], "German")
self.assertIs(li["bidi"], False)
def test_unknown_language_code_and_country_code(self):
with self.assertRaisesMessage(KeyError, "Unknown language code xx-xx and xx"):
get_language_info("xx-xx")
def test_fallback_language_code(self):
"""
        get_language_info() returns the first fallback language's info if the
        lang_info struct does not contain the 'name' key.
"""
li = get_language_info("zh-my")
self.assertEqual(li["code"], "zh-hans")
li = get_language_info("zh-hans")
self.assertEqual(li["code"], "zh-hans")
@override_settings(
USE_I18N=True,
LANGUAGES=[
("en", "English"),
("fr", "French"),
],
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
],
ROOT_URLCONF="i18n.urls",
)
class LocaleMiddlewareTests(TestCase):
def test_streaming_response(self):
# Regression test for #5241
response = self.client.get("/fr/streaming/")
self.assertContains(response, "Oui/Non")
response = self.client.get("/en/streaming/")
self.assertContains(response, "Yes/No")
@override_settings(
USE_I18N=True,
LANGUAGES=[
("en", "English"),
("de", "German"),
("fr", "French"),
],
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
],
ROOT_URLCONF="i18n.urls_default_unprefixed",
LANGUAGE_CODE="en",
)
class UnprefixedDefaultLanguageTests(SimpleTestCase):
def test_default_lang_without_prefix(self):
"""
With i18n_patterns(..., prefix_default_language=False), the default
language (settings.LANGUAGE_CODE) should be accessible without a prefix.
"""
response = self.client.get("/simple/")
self.assertEqual(response.content, b"Yes")
def test_other_lang_with_prefix(self):
response = self.client.get("/fr/simple/")
self.assertEqual(response.content, b"Oui")
def test_unprefixed_language_with_accept_language(self):
"""'Accept-Language' is respected."""
response = self.client.get("/simple/", headers={"accept-language": "fr"})
self.assertRedirects(response, "/fr/simple/")
def test_unprefixed_language_with_cookie_language(self):
"""A language set in the cookies is respected."""
self.client.cookies.load({settings.LANGUAGE_COOKIE_NAME: "fr"})
response = self.client.get("/simple/")
self.assertRedirects(response, "/fr/simple/")
def test_unprefixed_language_with_non_valid_language(self):
response = self.client.get("/simple/", headers={"accept-language": "fi"})
self.assertEqual(response.content, b"Yes")
self.client.cookies.load({settings.LANGUAGE_COOKIE_NAME: "fi"})
response = self.client.get("/simple/")
self.assertEqual(response.content, b"Yes")
def test_page_with_dash(self):
# A page starting with /de* shouldn't match the 'de' language code.
response = self.client.get("/de-simple-page-test/")
self.assertEqual(response.content, b"Yes")
def test_no_redirect_on_404(self):
"""
A request for a nonexistent URL shouldn't cause a redirect to
/<default_language>/<request_url> when prefix_default_language=False and
/<default_language>/<request_url> has a URL match (#27402).
"""
# A match for /group1/group2/ must exist for this to act as a
# regression test.
response = self.client.get("/group1/group2/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/nonexistent/")
self.assertEqual(response.status_code, 404)
@override_settings(
USE_I18N=True,
LANGUAGES=[
("bg", "Bulgarian"),
("en-us", "English"),
("pt-br", "Portuguese (Brazil)"),
],
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
],
ROOT_URLCONF="i18n.urls",
)
class CountrySpecificLanguageTests(SimpleTestCase):
rf = RequestFactory()
def test_check_for_language(self):
self.assertTrue(check_for_language("en"))
self.assertTrue(check_for_language("en-us"))
self.assertTrue(check_for_language("en-US"))
self.assertFalse(check_for_language("en_US"))
self.assertTrue(check_for_language("be"))
self.assertTrue(check_for_language("be@latin"))
self.assertTrue(check_for_language("sr-RS@latin"))
self.assertTrue(check_for_language("sr-RS@12345"))
self.assertFalse(check_for_language("en-ü"))
self.assertFalse(check_for_language("en\x00"))
self.assertFalse(check_for_language(None))
self.assertFalse(check_for_language("be@ "))
# Specifying encoding is not supported (Django enforces UTF-8)
self.assertFalse(check_for_language("tr-TR.UTF-8"))
self.assertFalse(check_for_language("tr-TR.UTF8"))
self.assertFalse(check_for_language("de-DE.utf-8"))
def test_check_for_language_null(self):
self.assertIs(trans_null.check_for_language("en"), True)
def test_get_language_from_request(self):
# issue 19919
request = self.rf.get(
"/", headers={"accept-language": "en-US,en;q=0.8,bg;q=0.6,ru;q=0.4"}
)
lang = get_language_from_request(request)
self.assertEqual("en-us", lang)
request = self.rf.get(
"/", headers={"accept-language": "bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4"}
)
lang = get_language_from_request(request)
self.assertEqual("bg", lang)
def test_get_language_from_request_null(self):
lang = trans_null.get_language_from_request(None)
self.assertEqual(lang, None)
def test_specific_language_codes(self):
# issue 11915
request = self.rf.get(
"/", headers={"accept-language": "pt,en-US;q=0.8,en;q=0.6,ru;q=0.4"}
)
lang = get_language_from_request(request)
self.assertEqual("pt-br", lang)
request = self.rf.get(
"/", headers={"accept-language": "pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4"}
)
lang = get_language_from_request(request)
self.assertEqual("pt-br", lang)
class TranslationFilesMissing(SimpleTestCase):
def setUp(self):
super().setUp()
self.gettext_find_builtin = gettext_module.find
def tearDown(self):
gettext_module.find = self.gettext_find_builtin
super().tearDown()
def patchGettextFind(self):
gettext_module.find = lambda *args, **kw: None
def test_failure_finding_default_mo_files(self):
"""OSError is raised if the default language is unparseable."""
self.patchGettextFind()
trans_real._translations = {}
with self.assertRaises(OSError):
activate("en")
class NonDjangoLanguageTests(SimpleTestCase):
"""
    A language not present in Django's default languages can still be
    installed and used by a Django project.
"""
@override_settings(
USE_I18N=True,
LANGUAGES=[
("en-us", "English"),
("xxx", "Somelanguage"),
],
LANGUAGE_CODE="xxx",
LOCALE_PATHS=[os.path.join(here, "commands", "locale")],
)
def test_non_django_language(self):
self.assertEqual(get_language(), "xxx")
self.assertEqual(gettext("year"), "reay")
@override_settings(USE_I18N=True)
def test_check_for_language(self):
with tempfile.TemporaryDirectory() as app_dir:
os.makedirs(os.path.join(app_dir, "locale", "dummy_Lang", "LC_MESSAGES"))
open(
os.path.join(
app_dir, "locale", "dummy_Lang", "LC_MESSAGES", "django.mo"
),
"w",
).close()
app_config = AppConfig("dummy_app", AppModuleStub(__path__=[app_dir]))
with mock.patch(
"django.apps.apps.get_app_configs", return_value=[app_config]
):
self.assertIs(check_for_language("dummy-lang"), True)
@override_settings(
USE_I18N=True,
LANGUAGES=[
("en-us", "English"),
# xyz language has no locale files
("xyz", "XYZ"),
],
)
@translation.override("xyz")
def test_plural_non_django_language(self):
self.assertEqual(get_language(), "xyz")
self.assertEqual(ngettext("year", "years", 2), "years")
@override_settings(USE_I18N=True)
class WatchForTranslationChangesTests(SimpleTestCase):
@override_settings(USE_I18N=False)
def test_i18n_disabled(self):
mocked_sender = mock.MagicMock()
watch_for_translation_changes(mocked_sender)
mocked_sender.watch_dir.assert_not_called()
def test_i18n_enabled(self):
mocked_sender = mock.MagicMock()
watch_for_translation_changes(mocked_sender)
self.assertGreater(mocked_sender.watch_dir.call_count, 1)
def test_i18n_locale_paths(self):
mocked_sender = mock.MagicMock()
with tempfile.TemporaryDirectory() as app_dir:
with self.settings(LOCALE_PATHS=[app_dir]):
watch_for_translation_changes(mocked_sender)
mocked_sender.watch_dir.assert_any_call(Path(app_dir), "**/*.mo")
def test_i18n_app_dirs(self):
mocked_sender = mock.MagicMock()
with self.settings(INSTALLED_APPS=["i18n.sampleproject"]):
watch_for_translation_changes(mocked_sender)
project_dir = Path(__file__).parent / "sampleproject" / "locale"
mocked_sender.watch_dir.assert_any_call(project_dir, "**/*.mo")
def test_i18n_app_dirs_ignore_django_apps(self):
mocked_sender = mock.MagicMock()
with self.settings(INSTALLED_APPS=["django.contrib.admin"]):
watch_for_translation_changes(mocked_sender)
mocked_sender.watch_dir.assert_called_once_with(Path("locale"), "**/*.mo")
def test_i18n_local_locale(self):
mocked_sender = mock.MagicMock()
watch_for_translation_changes(mocked_sender)
locale_dir = Path(__file__).parent / "locale"
mocked_sender.watch_dir.assert_any_call(locale_dir, "**/*.mo")
class TranslationFileChangedTests(SimpleTestCase):
def setUp(self):
self.gettext_translations = gettext_module._translations.copy()
self.trans_real_translations = trans_real._translations.copy()
def tearDown(self):
        gettext_module._translations = self.gettext_translations
trans_real._translations = self.trans_real_translations
def test_ignores_non_mo_files(self):
gettext_module._translations = {"foo": "bar"}
path = Path("test.py")
self.assertIsNone(translation_file_changed(None, path))
self.assertEqual(gettext_module._translations, {"foo": "bar"})
def test_resets_cache_with_mo_files(self):
gettext_module._translations = {"foo": "bar"}
trans_real._translations = {"foo": "bar"}
trans_real._default = 1
trans_real._active = False
path = Path("test.mo")
self.assertIs(translation_file_changed(None, path), True)
self.assertEqual(gettext_module._translations, {})
self.assertEqual(trans_real._translations, {})
self.assertIsNone(trans_real._default)
self.assertIsInstance(trans_real._active, Local)
class UtilsTests(SimpleTestCase):
def test_round_away_from_one(self):
tests = [
(0, 0),
(0.0, 0),
(0.25, 0),
(0.5, 0),
(0.75, 0),
(1, 1),
(1.0, 1),
(1.25, 2),
(1.5, 2),
(1.75, 2),
(-0.0, 0),
(-0.25, -1),
(-0.5, -1),
(-0.75, -1),
(-1, -1),
(-1.0, -1),
(-1.25, -2),
(-1.5, -2),
(-1.75, -2),
]
for value, expected in tests:
with self.subTest(value=value):
self.assertEqual(round_away_from_one(value), expected)
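    # An illustrative cross-check (hypothetical test name): round_away_from_one(x)
    # behaves like floor(x) for x < 1 and ceil(x) for x >= 1, which the table
    # above exercises exhaustively.
    def test_round_away_from_one_sketch(self):
        from math import ceil, floor
        for value in [0.25, 0.5, 1.25, -0.25, -1.5]:
            with self.subTest(value=value):
                expected = floor(value) if value < 1 else ceil(value)
                self.assertEqual(round_away_from_one(value), expected)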
|
ad73a9b7d58fbf8486abd25aafece61730afbe15ac4a8c7bccccd97089960ad6 | import collections.abc
from datetime import datetime
from math import ceil
from operator import attrgetter
from unittest import skipUnless
from django.core.exceptions import FieldError
from django.db import connection, models
from django.db.models import (
BooleanField,
Case,
Exists,
ExpressionWrapper,
F,
Max,
OuterRef,
Q,
Subquery,
Value,
When,
)
from django.db.models.functions import Cast, Substr
from django.db.models.lookups import (
Exact,
GreaterThan,
GreaterThanOrEqual,
IsNull,
LessThan,
LessThanOrEqual,
)
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import isolate_apps
from .models import (
Article,
Author,
Freebie,
Game,
IsNullWithNoneAsRHS,
Player,
Product,
Season,
Stock,
Tag,
)
class LookupTests(TestCase):
@classmethod
def setUpTestData(cls):
# Create a few Authors.
cls.au1 = Author.objects.create(name="Author 1", alias="a1")
cls.au2 = Author.objects.create(name="Author 2", alias="a2")
# Create a few Articles.
cls.a1 = Article.objects.create(
headline="Article 1",
pub_date=datetime(2005, 7, 26),
author=cls.au1,
slug="a1",
)
cls.a2 = Article.objects.create(
headline="Article 2",
pub_date=datetime(2005, 7, 27),
author=cls.au1,
slug="a2",
)
cls.a3 = Article.objects.create(
headline="Article 3",
pub_date=datetime(2005, 7, 27),
author=cls.au1,
slug="a3",
)
cls.a4 = Article.objects.create(
headline="Article 4",
pub_date=datetime(2005, 7, 28),
author=cls.au1,
slug="a4",
)
cls.a5 = Article.objects.create(
headline="Article 5",
pub_date=datetime(2005, 8, 1, 9, 0),
author=cls.au2,
slug="a5",
)
cls.a6 = Article.objects.create(
headline="Article 6",
pub_date=datetime(2005, 8, 1, 8, 0),
author=cls.au2,
slug="a6",
)
cls.a7 = Article.objects.create(
headline="Article 7",
pub_date=datetime(2005, 7, 27),
author=cls.au2,
slug="a7",
)
# Create a few Tags.
cls.t1 = Tag.objects.create(name="Tag 1")
cls.t1.articles.add(cls.a1, cls.a2, cls.a3)
cls.t2 = Tag.objects.create(name="Tag 2")
cls.t2.articles.add(cls.a3, cls.a4, cls.a5)
cls.t3 = Tag.objects.create(name="Tag 3")
cls.t3.articles.add(cls.a5, cls.a6, cls.a7)
def test_exists(self):
        # We can use .exists() to check that there are some articles.
self.assertTrue(Article.objects.exists())
for a in Article.objects.all():
a.delete()
# There should be none now!
self.assertFalse(Article.objects.exists())
def test_lookup_int_as_str(self):
        # An integer value can be queried using a string.
self.assertSequenceEqual(
Article.objects.filter(id__iexact=str(self.a1.id)),
[self.a1],
)
@skipUnlessDBFeature("supports_date_lookup_using_string")
def test_lookup_date_as_str(self):
# A date lookup can be performed using a string search
self.assertSequenceEqual(
Article.objects.filter(pub_date__startswith="2005"),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
def test_iterator(self):
# Each QuerySet gets iterator(), which is a generator that "lazily"
# returns results using database-level iteration.
self.assertIsInstance(Article.objects.iterator(), collections.abc.Iterator)
self.assertQuerySetEqual(
Article.objects.iterator(),
[
"Article 5",
"Article 6",
"Article 4",
"Article 2",
"Article 3",
"Article 7",
"Article 1",
],
transform=attrgetter("headline"),
)
# iterator() can be used on any QuerySet.
self.assertQuerySetEqual(
Article.objects.filter(headline__endswith="4").iterator(),
["Article 4"],
transform=attrgetter("headline"),
)
def test_count(self):
# count() returns the number of objects matching search criteria.
self.assertEqual(Article.objects.count(), 7)
self.assertEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count(), 3
)
self.assertEqual(
Article.objects.filter(headline__startswith="Blah blah").count(), 0
)
# count() should respect sliced query sets.
articles = Article.objects.all()
self.assertEqual(articles.count(), 7)
self.assertEqual(articles[:4].count(), 4)
self.assertEqual(articles[1:100].count(), 6)
self.assertEqual(articles[10:100].count(), 0)
# Date and date/time lookups can also be done with strings.
self.assertEqual(
Article.objects.filter(pub_date__exact="2005-07-27 00:00:00").count(), 3
)
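    # A short illustrative sketch (hypothetical test name): count() honors
    # slicing, so the result can never exceed the slice length.
    def test_count_sliced_sketch(self):
        qs = Article.objects.all()
        self.assertEqual(qs[:3].count(), min(3, qs.count()))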
def test_in_bulk(self):
# in_bulk() takes a list of IDs and returns a dictionary mapping IDs to objects.
arts = Article.objects.in_bulk([self.a1.id, self.a2.id])
self.assertEqual(arts[self.a1.id], self.a1)
self.assertEqual(arts[self.a2.id], self.a2)
self.assertEqual(
Article.objects.in_bulk(),
{
self.a1.id: self.a1,
self.a2.id: self.a2,
self.a3.id: self.a3,
self.a4.id: self.a4,
self.a5.id: self.a5,
self.a6.id: self.a6,
self.a7.id: self.a7,
},
)
self.assertEqual(Article.objects.in_bulk([self.a3.id]), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk({self.a3.id}), {self.a3.id: self.a3})
self.assertEqual(
Article.objects.in_bulk(frozenset([self.a3.id])), {self.a3.id: self.a3}
)
self.assertEqual(Article.objects.in_bulk((self.a3.id,)), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk([1000]), {})
self.assertEqual(Article.objects.in_bulk([]), {})
self.assertEqual(
Article.objects.in_bulk(iter([self.a1.id])), {self.a1.id: self.a1}
)
self.assertEqual(Article.objects.in_bulk(iter([])), {})
with self.assertRaises(TypeError):
Article.objects.in_bulk(headline__startswith="Blah")
def test_in_bulk_lots_of_ids(self):
test_range = 2000
max_query_params = connection.features.max_query_params
expected_num_queries = (
ceil(test_range / max_query_params) if max_query_params else 1
)
Author.objects.bulk_create(
[Author() for i in range(test_range - Author.objects.count())]
)
authors = {author.pk: author for author in Author.objects.all()}
with self.assertNumQueries(expected_num_queries):
self.assertEqual(Author.objects.in_bulk(authors), authors)
def test_in_bulk_with_field(self):
self.assertEqual(
Article.objects.in_bulk(
[self.a1.slug, self.a2.slug, self.a3.slug], field_name="slug"
),
{
self.a1.slug: self.a1,
self.a2.slug: self.a2,
self.a3.slug: self.a3,
},
)
def test_in_bulk_meta_constraint(self):
season_2011 = Season.objects.create(year=2011)
season_2012 = Season.objects.create(year=2012)
Season.objects.create(year=2013)
self.assertEqual(
Season.objects.in_bulk(
[season_2011.year, season_2012.year],
field_name="year",
),
{season_2011.year: season_2011, season_2012.year: season_2012},
)
def test_in_bulk_non_unique_field(self):
msg = "in_bulk()'s field_name must be a unique field but 'author' isn't."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.in_bulk([self.au1], field_name="author")
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_distinct_field(self):
self.assertEqual(
Article.objects.order_by("headline")
.distinct("headline")
.in_bulk(
[self.a1.headline, self.a5.headline],
field_name="headline",
),
{self.a1.headline: self.a1, self.a5.headline: self.a5},
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_multiple_distinct_field(self):
msg = "in_bulk()'s field_name must be a unique field but 'pub_date' isn't."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.order_by("headline", "pub_date").distinct(
"headline",
"pub_date",
).in_bulk(field_name="pub_date")
@isolate_apps("lookup")
    def test_in_bulk_non_unique_meta_constraint(self):
class Model(models.Model):
ean = models.CharField(max_length=100)
brand = models.CharField(max_length=100)
name = models.CharField(max_length=80)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["ean"],
name="partial_ean_unique",
condition=models.Q(is_active=True),
),
models.UniqueConstraint(
fields=["brand", "name"],
name="together_brand_name_unique",
),
]
msg = "in_bulk()'s field_name must be a unique field but '%s' isn't."
for field_name in ["brand", "ean"]:
with self.subTest(field_name=field_name):
with self.assertRaisesMessage(ValueError, msg % field_name):
Model.objects.in_bulk(field_name=field_name)
def test_in_bulk_sliced_queryset(self):
msg = "Cannot use 'limit' or 'offset' with in_bulk()."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].in_bulk([self.a1.id, self.a2.id])
def test_values(self):
# values() returns a list of dictionaries instead of object instances --
# and you can specify which fields you want to retrieve.
self.assertSequenceEqual(
Article.objects.values("headline"),
[
{"headline": "Article 5"},
{"headline": "Article 6"},
{"headline": "Article 4"},
{"headline": "Article 2"},
{"headline": "Article 3"},
{"headline": "Article 7"},
{"headline": "Article 1"},
],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values("id"),
[{"id": self.a2.id}, {"id": self.a3.id}, {"id": self.a7.id}],
)
self.assertSequenceEqual(
Article.objects.values("id", "headline"),
[
{"id": self.a5.id, "headline": "Article 5"},
{"id": self.a6.id, "headline": "Article 6"},
{"id": self.a4.id, "headline": "Article 4"},
{"id": self.a2.id, "headline": "Article 2"},
{"id": self.a3.id, "headline": "Article 3"},
{"id": self.a7.id, "headline": "Article 7"},
{"id": self.a1.id, "headline": "Article 1"},
],
)
# You can use values() with iterator() for memory savings,
# because iterator() uses database-level iteration.
self.assertSequenceEqual(
list(Article.objects.values("id", "headline").iterator()),
[
{"headline": "Article 5", "id": self.a5.id},
{"headline": "Article 6", "id": self.a6.id},
{"headline": "Article 4", "id": self.a4.id},
{"headline": "Article 2", "id": self.a2.id},
{"headline": "Article 3", "id": self.a3.id},
{"headline": "Article 7", "id": self.a7.id},
{"headline": "Article 1", "id": self.a1.id},
],
)
# The values() method works with "extra" fields specified in extra(select).
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id + 1"}).values(
"id", "id_plus_one"
),
[
{"id": self.a5.id, "id_plus_one": self.a5.id + 1},
{"id": self.a6.id, "id_plus_one": self.a6.id + 1},
{"id": self.a4.id, "id_plus_one": self.a4.id + 1},
{"id": self.a2.id, "id_plus_one": self.a2.id + 1},
{"id": self.a3.id, "id_plus_one": self.a3.id + 1},
{"id": self.a7.id, "id_plus_one": self.a7.id + 1},
{"id": self.a1.id, "id_plus_one": self.a1.id + 1},
],
)
data = {
"id_plus_one": "id+1",
"id_plus_two": "id+2",
"id_plus_three": "id+3",
"id_plus_four": "id+4",
"id_plus_five": "id+5",
"id_plus_six": "id+6",
"id_plus_seven": "id+7",
"id_plus_eight": "id+8",
}
self.assertSequenceEqual(
Article.objects.filter(id=self.a1.id).extra(select=data).values(*data),
[
{
"id_plus_one": self.a1.id + 1,
"id_plus_two": self.a1.id + 2,
"id_plus_three": self.a1.id + 3,
"id_plus_four": self.a1.id + 4,
"id_plus_five": self.a1.id + 5,
"id_plus_six": self.a1.id + 6,
"id_plus_seven": self.a1.id + 7,
"id_plus_eight": self.a1.id + 8,
}
],
)
# You can specify fields from forward and reverse relations, just like filter().
self.assertSequenceEqual(
Article.objects.values("headline", "author__name"),
[
{"headline": self.a5.headline, "author__name": self.au2.name},
{"headline": self.a6.headline, "author__name": self.au2.name},
{"headline": self.a4.headline, "author__name": self.au1.name},
{"headline": self.a2.headline, "author__name": self.au1.name},
{"headline": self.a3.headline, "author__name": self.au1.name},
{"headline": self.a7.headline, "author__name": self.au2.name},
{"headline": self.a1.headline, "author__name": self.au1.name},
],
)
self.assertSequenceEqual(
Author.objects.values("name", "article__headline").order_by(
"name", "article__headline"
),
[
{"name": self.au1.name, "article__headline": self.a1.headline},
{"name": self.au1.name, "article__headline": self.a2.headline},
{"name": self.au1.name, "article__headline": self.a3.headline},
{"name": self.au1.name, "article__headline": self.a4.headline},
{"name": self.au2.name, "article__headline": self.a5.headline},
{"name": self.au2.name, "article__headline": self.a6.headline},
{"name": self.au2.name, "article__headline": self.a7.headline},
],
)
self.assertSequenceEqual(
(
Author.objects.values(
"name", "article__headline", "article__tag__name"
).order_by("name", "article__headline", "article__tag__name")
),
[
{
"name": self.au1.name,
"article__headline": self.a1.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a2.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a3.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a3.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au1.name,
"article__headline": self.a4.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au2.name,
"article__headline": self.a5.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au2.name,
"article__headline": self.a5.headline,
"article__tag__name": self.t3.name,
},
{
"name": self.au2.name,
"article__headline": self.a6.headline,
"article__tag__name": self.t3.name,
},
{
"name": self.au2.name,
"article__headline": self.a7.headline,
"article__tag__name": self.t3.name,
},
],
)
        # However, FieldError is raised if you specify a nonexistent field
        # name in values() (a field that is neither in the model nor in
        # extra(select)).
msg = (
"Cannot resolve keyword 'id_plus_two' into field. Choices are: "
"author, author_id, headline, id, id_plus_one, pub_date, slug, tag"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.extra(select={"id_plus_one": "id + 1"}).values(
"id", "id_plus_two"
)
# If you don't specify field names to values(), all are returned.
self.assertSequenceEqual(
Article.objects.filter(id=self.a5.id).values(),
[
{
"id": self.a5.id,
"author_id": self.au2.id,
"headline": "Article 5",
"pub_date": datetime(2005, 8, 1, 9, 0),
"slug": "a5",
}
],
)
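    # An illustrative sketch (hypothetical test name; values() also accepts
    # expressions as keyword arguments in current Django):
    def test_values_expression_sketch(self):
        from django.db.models.functions import Length
        self.assertSequenceEqual(
            Article.objects.filter(id=self.a1.id).values(
                headline_length=Length("headline")
            ),
            [{"headline_length": len("Article 1")}],
        )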
def test_values_list(self):
# values_list() is similar to values(), except that the results are
# returned as a list of tuples, rather than a list of dictionaries.
# Within each tuple, the order of the elements is the same as the order
# of fields in the values_list() call.
self.assertSequenceEqual(
Article.objects.values_list("headline"),
[
("Article 5",),
("Article 6",),
("Article 4",),
("Article 2",),
("Article 3",),
("Article 7",),
("Article 1",),
],
)
self.assertSequenceEqual(
Article.objects.values_list("id").order_by("id"),
[
(self.a1.id,),
(self.a2.id,),
(self.a3.id,),
(self.a4.id,),
(self.a5.id,),
(self.a6.id,),
(self.a7.id,),
],
)
self.assertSequenceEqual(
Article.objects.values_list("id", flat=True).order_by("id"),
[
self.a1.id,
self.a2.id,
self.a3.id,
self.a4.id,
self.a5.id,
self.a6.id,
self.a7.id,
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id"),
[
(self.a1.id,),
(self.a2.id,),
(self.a3.id,),
(self.a4.id,),
(self.a5.id,),
(self.a6.id,),
(self.a7.id,),
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id_plus_one", "id"),
[
(self.a1.id + 1, self.a1.id),
(self.a2.id + 1, self.a2.id),
(self.a3.id + 1, self.a3.id),
(self.a4.id + 1, self.a4.id),
(self.a5.id + 1, self.a5.id),
(self.a6.id + 1, self.a6.id),
(self.a7.id + 1, self.a7.id),
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id", "id_plus_one"),
[
(self.a1.id, self.a1.id + 1),
(self.a2.id, self.a2.id + 1),
(self.a3.id, self.a3.id + 1),
(self.a4.id, self.a4.id + 1),
(self.a5.id, self.a5.id + 1),
(self.a6.id, self.a6.id + 1),
(self.a7.id, self.a7.id + 1),
],
)
args = ("name", "article__headline", "article__tag__name")
self.assertSequenceEqual(
Author.objects.values_list(*args).order_by(*args),
[
(self.au1.name, self.a1.headline, self.t1.name),
(self.au1.name, self.a2.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t2.name),
(self.au1.name, self.a4.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t3.name),
(self.au2.name, self.a6.headline, self.t3.name),
(self.au2.name, self.a7.headline, self.t3.name),
],
)
with self.assertRaises(TypeError):
Article.objects.values_list("id", "headline", flat=True)
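    def test_values_list_named_sketch(self):
        # Hedged sketch, not part of the original suite: values_list() also
        # accepts named=True, which returns each row as a namedtuple so the
        # positional fields gain attribute access.
        row = Article.objects.values_list("id", "headline", named=True).get(
            pk=self.a1.pk
        )
        self.assertEqual(row.headline, self.a1.headline)
        self.assertEqual(row, (self.a1.id, self.a1.headline))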
def test_get_next_previous_by(self):
# Every DateField and DateTimeField creates get_next_by_FOO() and
# get_previous_by_FOO() methods. In the case of identical date values,
# these methods will use the ID as a fallback check. This guarantees
# that no records are skipped or duplicated.
self.assertEqual(repr(self.a1.get_next_by_pub_date()), "<Article: Article 2>")
self.assertEqual(repr(self.a2.get_next_by_pub_date()), "<Article: Article 3>")
self.assertEqual(
repr(self.a2.get_next_by_pub_date(headline__endswith="6")),
"<Article: Article 6>",
)
self.assertEqual(repr(self.a3.get_next_by_pub_date()), "<Article: Article 7>")
self.assertEqual(repr(self.a4.get_next_by_pub_date()), "<Article: Article 6>")
with self.assertRaises(Article.DoesNotExist):
self.a5.get_next_by_pub_date()
self.assertEqual(repr(self.a6.get_next_by_pub_date()), "<Article: Article 5>")
self.assertEqual(repr(self.a7.get_next_by_pub_date()), "<Article: Article 4>")
self.assertEqual(
repr(self.a7.get_previous_by_pub_date()), "<Article: Article 3>"
)
self.assertEqual(
repr(self.a6.get_previous_by_pub_date()), "<Article: Article 4>"
)
self.assertEqual(
repr(self.a5.get_previous_by_pub_date()), "<Article: Article 6>"
)
self.assertEqual(
repr(self.a4.get_previous_by_pub_date()), "<Article: Article 7>"
)
self.assertEqual(
repr(self.a3.get_previous_by_pub_date()), "<Article: Article 2>"
)
self.assertEqual(
repr(self.a2.get_previous_by_pub_date()), "<Article: Article 1>"
)
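    def test_get_next_previous_by_pk_tiebreak_sketch(self):
        # Hedged sketch, not part of the original suite: with identical
        # pub_date values, get_next_by_FOO()/get_previous_by_FOO() fall back
        # to the primary key, so traversal neither skips nor repeats rows.
        shared = datetime(2005, 12, 1)
        first = Article.objects.create(headline="Tie 1", pub_date=shared)
        second = Article.objects.create(headline="Tie 2", pub_date=shared)
        self.assertEqual(first.get_next_by_pub_date(), second)
        self.assertEqual(second.get_previous_by_pub_date(), first)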
def test_escaping(self):
# Underscores, percent signs and backslashes have special meaning in the
# underlying SQL code, but Django handles the quoting of them automatically.
a8 = Article.objects.create(
headline="Article_ with underscore", pub_date=datetime(2005, 11, 20)
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article"),
[a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article_"),
[a8],
)
a9 = Article.objects.create(
headline="Article% with percent sign", pub_date=datetime(2005, 11, 21)
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article"),
[a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article%"),
[a9],
)
a10 = Article.objects.create(
headline="Article with \\ backslash", pub_date=datetime(2005, 11, 22)
)
self.assertSequenceEqual(
Article.objects.filter(headline__contains="\\"),
[a10],
)
def test_exclude(self):
pub_date = datetime(2005, 11, 20)
a8 = Article.objects.create(
headline="Article_ with underscore", pub_date=pub_date
)
a9 = Article.objects.create(
headline="Article% with percent sign", pub_date=pub_date
)
a10 = Article.objects.create(
headline="Article with \\ backslash", pub_date=pub_date
)
# exclude() is the opposite of filter() when doing lookups:
self.assertSequenceEqual(
Article.objects.filter(headline__contains="Article").exclude(
headline__contains="with"
),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.exclude(headline__startswith="Article_"),
[a10, a9, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.exclude(headline="Article 7"),
[a10, a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a1],
)
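    def test_exclude_multiple_conditions_sketch(self):
        # Hedged sketch, not part of the original suite: multiple keyword
        # arguments to a single exclude() call are ANDed first and the whole
        # condition is then negated, i.e. NOT (a AND b).
        self.assertSequenceEqual(
            Article.objects.exclude(
                headline=self.a1.headline, pub_date=self.a1.pub_date
            ),
            [self.a5, self.a6, self.a4, self.a2, self.a3, self.a7],
        )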
def test_none(self):
        # none() returns a QuerySet that behaves like any other QuerySet
        # object.
self.assertSequenceEqual(Article.objects.none(), [])
self.assertSequenceEqual(
Article.objects.none().filter(headline__startswith="Article"), []
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article").none(), []
)
self.assertEqual(Article.objects.none().count(), 0)
self.assertEqual(
Article.objects.none().update(headline="This should not take effect"), 0
)
self.assertSequenceEqual(list(Article.objects.none().iterator()), [])
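    def test_none_combine_sketch(self):
        # Hedged sketch, not part of the original suite: combining an empty
        # QuerySet with | simply yields the other operand's results.
        self.assertSequenceEqual(
            Article.objects.none() | Article.objects.filter(pk=self.a1.pk),
            [self.a1],
        )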
def test_in(self):
self.assertSequenceEqual(
Article.objects.exclude(id__in=[]),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
def test_in_empty_list(self):
self.assertSequenceEqual(Article.objects.filter(id__in=[]), [])
def test_in_different_database(self):
with self.assertRaisesMessage(
ValueError,
"Subqueries aren't allowed across different databases. Force the "
"inner query to be evaluated using `list(inner_query)`.",
):
list(Article.objects.filter(id__in=Article.objects.using("other").all()))
def test_in_keeps_value_ordering(self):
query = (
Article.objects.filter(slug__in=["a%d" % i for i in range(1, 8)])
.values("pk")
.query
)
self.assertIn(" IN (a1, a2, a3, a4, a5, a6, a7) ", str(query))
def test_in_ignore_none(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
Article.objects.filter(id__in=[None, self.a1.id]),
[self.a1],
)
sql = ctx.captured_queries[0]["sql"]
self.assertIn("IN (%s)" % self.a1.pk, sql)
def test_in_ignore_solo_none(self):
with self.assertNumQueries(0):
self.assertSequenceEqual(Article.objects.filter(id__in=[None]), [])
def test_in_ignore_none_with_unhashable_items(self):
class UnhashableInt(int):
__hash__ = None
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
Article.objects.filter(id__in=[None, UnhashableInt(self.a1.id)]),
[self.a1],
)
sql = ctx.captured_queries[0]["sql"]
self.assertIn("IN (%s)" % self.a1.pk, sql)
def test_error_messages(self):
# Programming errors are pointed out with nice error messages
with self.assertRaisesMessage(
FieldError,
"Cannot resolve keyword 'pub_date_year' into field. Choices are: "
"author, author_id, headline, id, pub_date, slug, tag",
):
Article.objects.filter(pub_date_year="2005").count()
def test_unsupported_lookups(self):
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'starts' for CharField or join on the field "
"not permitted, perhaps you meant startswith or istartswith?",
):
Article.objects.filter(headline__starts="Article")
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'is_null' for DateTimeField or join on the field "
"not permitted, perhaps you meant isnull?",
):
Article.objects.filter(pub_date__is_null=True)
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'gobbledygook' for DateTimeField or join on the field "
"not permitted.",
):
Article.objects.filter(pub_date__gobbledygook="blahblah")
def test_relation_nested_lookup_error(self):
# An invalid nested lookup on a related field raises a useful error.
msg = (
"Unsupported lookup 'editor' for ForeignKey or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(author__editor__name="James")
msg = (
"Unsupported lookup 'foo' for ForeignKey or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(articles__foo="bar")
def test_regex(self):
# Create some articles with a bit more interesting headlines for
# testing field lookups.
Article.objects.all().delete()
now = datetime.now()
Article.objects.bulk_create(
[
Article(pub_date=now, headline="f"),
Article(pub_date=now, headline="fo"),
Article(pub_date=now, headline="foo"),
Article(pub_date=now, headline="fooo"),
Article(pub_date=now, headline="hey-Foo"),
Article(pub_date=now, headline="bar"),
Article(pub_date=now, headline="AbBa"),
Article(pub_date=now, headline="baz"),
Article(pub_date=now, headline="baxZ"),
]
)
# zero-or-more
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fo*"),
Article.objects.filter(headline__in=["f", "fo", "foo", "fooo"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"fo*"),
Article.objects.filter(headline__in=["f", "fo", "foo", "fooo", "hey-Foo"]),
)
# one-or-more
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fo+"),
Article.objects.filter(headline__in=["fo", "foo", "fooo"]),
)
# wildcard
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fooo?"),
Article.objects.filter(headline__in=["foo", "fooo"]),
)
# leading anchor
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"^b"),
Article.objects.filter(headline__in=["bar", "baxZ", "baz"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"^a"),
Article.objects.filter(headline="AbBa"),
)
# trailing anchor
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"z$"),
Article.objects.filter(headline="baz"),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"z$"),
Article.objects.filter(headline__in=["baxZ", "baz"]),
)
# character sets
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"ba[rz]"),
Article.objects.filter(headline__in=["bar", "baz"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"ba.[RxZ]"),
Article.objects.filter(headline="baxZ"),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"ba[RxZ]"),
Article.objects.filter(headline__in=["bar", "baxZ", "baz"]),
)
# and more articles:
Article.objects.bulk_create(
[
Article(pub_date=now, headline="foobar"),
Article(pub_date=now, headline="foobaz"),
Article(pub_date=now, headline="ooF"),
Article(pub_date=now, headline="foobarbaz"),
Article(pub_date=now, headline="zoocarfaz"),
Article(pub_date=now, headline="barfoobaz"),
Article(pub_date=now, headline="bazbaRFOO"),
]
)
# alternation
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"oo(f|b)"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"foobar",
"foobarbaz",
"foobaz",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"oo(f|b)"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"foobar",
"foobarbaz",
"foobaz",
"ooF",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"^foo(f|b)"),
Article.objects.filter(headline__in=["foobar", "foobarbaz", "foobaz"]),
)
# greedy matching
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"b.*az"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"baz",
"bazbaRFOO",
"foobarbaz",
"foobaz",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"b.*ar"),
Article.objects.filter(
headline__in=[
"bar",
"barfoobaz",
"bazbaRFOO",
"foobar",
"foobarbaz",
]
),
)
@skipUnlessDBFeature("supports_regex_backreferencing")
def test_regex_backreferencing(self):
# grouping and backreferences
now = datetime.now()
Article.objects.bulk_create(
[
Article(pub_date=now, headline="foobar"),
Article(pub_date=now, headline="foobaz"),
Article(pub_date=now, headline="ooF"),
Article(pub_date=now, headline="foobarbaz"),
Article(pub_date=now, headline="zoocarfaz"),
Article(pub_date=now, headline="barfoobaz"),
Article(pub_date=now, headline="bazbaRFOO"),
]
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"b(.).*b\1").values_list(
"headline", flat=True
),
["barfoobaz", "bazbaRFOO", "foobarbaz"],
)
def test_regex_null(self):
"""
A regex lookup does not fail on null/None values
"""
Season.objects.create(year=2012, gt=None)
self.assertQuerySetEqual(Season.objects.filter(gt__regex=r"^$"), [])
def test_regex_non_string(self):
"""
A regex lookup does not fail on non-string fields
"""
s = Season.objects.create(year=2013, gt=444)
self.assertQuerySetEqual(Season.objects.filter(gt__regex=r"^444$"), [s])
def test_regex_non_ascii(self):
"""
A regex lookup does not trip on non-ASCII characters.
"""
Player.objects.create(name="\u2660")
Player.objects.get(name__regex="\u2660")
def test_nonfield_lookups(self):
"""
A lookup query containing non-fields raises the proper exception.
"""
msg = (
"Unsupported lookup 'blahblah' for CharField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(headline__blahblah=99)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(headline__blahblah__exact=99)
msg = (
"Cannot resolve keyword 'blahblah' into field. Choices are: "
"author, author_id, headline, id, pub_date, slug, tag"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(blahblah=99)
def test_lookup_collision(self):
"""
Genuine field names don't collide with built-in lookup types
('year', 'gt', 'range', 'in' etc.) (#11670).
"""
# 'gt' is used as a code number for the year, e.g. 111=>2009.
season_2009 = Season.objects.create(year=2009, gt=111)
season_2009.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2010 = Season.objects.create(year=2010, gt=222)
season_2010.games.create(home="Houston Astros", away="Chicago Cubs")
season_2010.games.create(home="Houston Astros", away="Milwaukee Brewers")
season_2010.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011 = Season.objects.create(year=2011, gt=333)
season_2011.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011.games.create(home="Houston Astros", away="Milwaukee Brewers")
hunter_pence = Player.objects.create(name="Hunter Pence")
hunter_pence.games.set(Game.objects.filter(season__year__in=[2009, 2010]))
pudge = Player.objects.create(name="Ivan Rodriquez")
pudge.games.set(Game.objects.filter(season__year=2009))
pedro_feliz = Player.objects.create(name="Pedro Feliz")
pedro_feliz.games.set(Game.objects.filter(season__year__in=[2011]))
johnson = Player.objects.create(name="Johnson")
johnson.games.set(Game.objects.filter(season__year__in=[2011]))
# Games in 2010
self.assertEqual(Game.objects.filter(season__year=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__year__exact=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__gt=222).count(), 3)
self.assertEqual(Game.objects.filter(season__gt__exact=222).count(), 3)
# Games in 2011
self.assertEqual(Game.objects.filter(season__year=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__year__exact=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__gt=333).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__exact=333).count(), 2)
self.assertEqual(Game.objects.filter(season__year__gt=2010).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__gt=222).count(), 2)
# Games played in 2010 and 2011
self.assertEqual(Game.objects.filter(season__year__in=[2010, 2011]).count(), 5)
self.assertEqual(Game.objects.filter(season__year__gt=2009).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__in=[222, 333]).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__gt=111).count(), 5)
# Players who played in 2009
self.assertEqual(
Player.objects.filter(games__season__year=2009).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2009).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt=111).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt__exact=111).distinct().count(), 2
)
# Players who played in 2010
self.assertEqual(
Player.objects.filter(games__season__year=2010).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2010).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__gt=222).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__gt__exact=222).distinct().count(), 1
)
# Players who played in 2011
self.assertEqual(
Player.objects.filter(games__season__year=2011).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2011).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt=333).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__gt=2010).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt__gt=222).distinct().count(), 2
)
def test_chain_date_time_lookups(self):
self.assertCountEqual(
Article.objects.filter(pub_date__month__gt=7),
[self.a5, self.a6],
)
self.assertCountEqual(
Article.objects.filter(pub_date__day__gte=27),
[self.a2, self.a3, self.a4, self.a7],
)
self.assertCountEqual(
Article.objects.filter(pub_date__hour__lt=8),
[self.a1, self.a2, self.a3, self.a4, self.a7],
)
self.assertCountEqual(
Article.objects.filter(pub_date__minute__lte=0),
[self.a1, self.a2, self.a3, self.a4, self.a5, self.a6, self.a7],
)
def test_exact_none_transform(self):
"""Transforms are used for __exact=None."""
Season.objects.create(year=1, nulled_text_field="not null")
self.assertFalse(Season.objects.filter(nulled_text_field__isnull=True))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled__isnull=True))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled__exact=None))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled=None))
def test_exact_sliced_queryset_limit_one(self):
self.assertCountEqual(
Article.objects.filter(author=Author.objects.all()[:1]),
[self.a1, self.a2, self.a3, self.a4],
)
def test_exact_sliced_queryset_limit_one_offset(self):
self.assertCountEqual(
Article.objects.filter(author=Author.objects.all()[1:2]),
[self.a5, self.a6, self.a7],
)
def test_exact_sliced_queryset_not_limited_to_one(self):
msg = (
"The QuerySet value for an exact lookup must be limited to one "
"result using slicing."
)
with self.assertRaisesMessage(ValueError, msg):
list(Article.objects.filter(author=Author.objects.all()[:2]))
with self.assertRaisesMessage(ValueError, msg):
list(Article.objects.filter(author=Author.objects.all()[1:]))
@skipUnless(connection.vendor == "mysql", "MySQL-specific workaround.")
def test_exact_booleanfield(self):
# MySQL ignores indexes with boolean fields unless they're compared
# directly to a boolean value.
product = Product.objects.create(name="Paper", qty_target=5000)
Stock.objects.create(product=product, short=False, qty_available=5100)
stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
qs = Stock.objects.filter(short=True)
self.assertSequenceEqual(qs, [stock_1])
self.assertIn(
"%s = True" % connection.ops.quote_name("short"),
str(qs.query),
)
@skipUnless(connection.vendor == "mysql", "MySQL-specific workaround.")
def test_exact_booleanfield_annotation(self):
# MySQL ignores indexes with boolean fields unless they're compared
# directly to a boolean value.
qs = Author.objects.annotate(
case=Case(
When(alias="a1", then=True),
default=False,
output_field=BooleanField(),
)
).filter(case=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertIn(" = True", str(qs.query))
qs = Author.objects.annotate(
wrapped=ExpressionWrapper(Q(alias="a1"), output_field=BooleanField()),
).filter(wrapped=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertIn(" = True", str(qs.query))
# EXISTS(...) shouldn't be compared to a boolean value.
qs = Author.objects.annotate(
exists=Exists(Author.objects.filter(alias="a1", pk=OuterRef("pk"))),
).filter(exists=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertNotIn(" = True", str(qs.query))
def test_custom_field_none_rhs(self):
"""
__exact=value is transformed to __isnull=True if Field.get_prep_value()
converts value to None.
"""
season = Season.objects.create(year=2012, nulled_text_field=None)
self.assertTrue(
Season.objects.filter(pk=season.pk, nulled_text_field__isnull=True)
)
self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field=""))
def test_pattern_lookups_with_substr(self):
a = Author.objects.create(name="John Smith", alias="Johx")
b = Author.objects.create(name="Rhonda Simpson", alias="sonx")
tests = (
("startswith", [a]),
("istartswith", [a]),
("contains", [a, b]),
("icontains", [a, b]),
("endswith", [b]),
("iendswith", [b]),
)
for lookup, result in tests:
with self.subTest(lookup=lookup):
authors = Author.objects.filter(
**{"name__%s" % lookup: Substr("alias", 1, 3)}
)
self.assertCountEqual(authors, result)
def test_custom_lookup_none_rhs(self):
"""Lookup.can_use_none_as_rhs=True allows None as a lookup value."""
season = Season.objects.create(year=2012, nulled_text_field=None)
query = Season.objects.get_queryset().query
field = query.model._meta.get_field("nulled_text_field")
self.assertIsInstance(
query.build_lookup(["isnull_none_rhs"], field, None), IsNullWithNoneAsRHS
)
self.assertTrue(
Season.objects.filter(pk=season.pk, nulled_text_field__isnull_none_rhs=True)
)
def test_exact_exists(self):
qs = Article.objects.filter(pk=OuterRef("pk"))
seasons = Season.objects.annotate(pk_exists=Exists(qs)).filter(
pk_exists=Exists(qs),
)
self.assertCountEqual(seasons, Season.objects.all())
def test_nested_outerref_lhs(self):
tag = Tag.objects.create(name=self.au1.alias)
tag.articles.add(self.a1)
qs = Tag.objects.annotate(
has_author_alias_match=Exists(
Article.objects.annotate(
author_exists=Exists(
Author.objects.filter(alias=OuterRef(OuterRef("name")))
),
).filter(author_exists=True)
),
)
self.assertEqual(qs.get(has_author_alias_match=True), tag)
def test_exact_query_rhs_with_selected_columns(self):
newest_author = Author.objects.create(name="Author 2")
authors_max_ids = (
Author.objects.filter(
name="Author 2",
)
.values(
"name",
)
.annotate(
max_id=Max("id"),
)
.values("max_id")
)
authors = Author.objects.filter(id=authors_max_ids[:1])
self.assertEqual(authors.get(), newest_author)
def test_isnull_non_boolean_value(self):
msg = "The QuerySet value for an isnull lookup must be True or False."
tests = [
Author.objects.filter(alias__isnull=1),
Article.objects.filter(author__isnull=1),
Season.objects.filter(games__isnull=1),
Freebie.objects.filter(stock__isnull=1),
]
for qs in tests:
with self.subTest(qs=qs):
with self.assertRaisesMessage(ValueError, msg):
qs.exists()
def test_lookup_rhs(self):
product = Product.objects.create(name="GME", qty_target=5000)
stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
stock_2 = Stock.objects.create(product=product, short=False, qty_available=5100)
Stock.objects.create(product=product, short=False, qty_available=4000)
self.assertCountEqual(
Stock.objects.filter(short=Q(qty_available__lt=F("product__qty_target"))),
[stock_1, stock_2],
)
self.assertCountEqual(
Stock.objects.filter(
short=ExpressionWrapper(
Q(qty_available__lt=F("product__qty_target")),
output_field=BooleanField(),
)
),
[stock_1, stock_2],
)
class LookupQueryingTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.s1 = Season.objects.create(year=1942, gt=1942)
cls.s2 = Season.objects.create(year=1842, gt=1942, nulled_text_field="text")
cls.s3 = Season.objects.create(year=2042, gt=1942)
def test_annotate(self):
qs = Season.objects.annotate(equal=Exact(F("year"), 1942))
self.assertCountEqual(
qs.values_list("year", "equal"),
((1942, True), (1842, False), (2042, False)),
)
def test_alias(self):
qs = Season.objects.alias(greater=GreaterThan(F("year"), 1910))
self.assertCountEqual(qs.filter(greater=True), [self.s1, self.s3])
def test_annotate_value_greater_than_value(self):
qs = Season.objects.annotate(greater=GreaterThan(Value(40), Value(30)))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, True), (2042, True)),
)
def test_annotate_field_greater_than_field(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), F("gt")))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, False), (2042, True)),
)
def test_annotate_field_greater_than_value(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), Value(1930)))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_field_greater_than_literal(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), 1930))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_literal_greater_than_field(self):
qs = Season.objects.annotate(greater=GreaterThan(1930, F("year")))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, True), (2042, False)),
)
def test_annotate_less_than_float(self):
qs = Season.objects.annotate(lesser=LessThan(F("year"), 1942.1))
self.assertCountEqual(
qs.values_list("year", "lesser"),
((1942, True), (1842, True), (2042, False)),
)
def test_annotate_greater_than_or_equal(self):
qs = Season.objects.annotate(greater=GreaterThanOrEqual(F("year"), 1942))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_greater_than_or_equal_float(self):
qs = Season.objects.annotate(greater=GreaterThanOrEqual(F("year"), 1942.1))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, False), (2042, True)),
)
def test_combined_lookups(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression)
self.assertCountEqual(
qs.values_list("year", "gte"),
((1942, True), (1842, False), (2042, True)),
)
def test_lookup_in_filter(self):
qs = Season.objects.filter(GreaterThan(F("year"), 1910))
self.assertCountEqual(qs, [self.s1, self.s3])
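    def test_lookup_in_filter_with_kwargs_sketch(self):
        # Hedged sketch, not part of the original suite: positional lookup
        # expressions can be mixed with keyword lookups in one filter() call;
        # the conditions are ANDed together.
        qs = Season.objects.filter(GreaterThan(F("year"), 1910), year__lt=2000)
        self.assertSequenceEqual(qs, [self.s1])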
def test_isnull_lookup_in_filter(self):
self.assertSequenceEqual(
Season.objects.filter(IsNull(F("nulled_text_field"), False)),
[self.s2],
)
self.assertCountEqual(
Season.objects.filter(IsNull(F("nulled_text_field"), True)),
[self.s1, self.s3],
)
def test_filter_lookup_lhs(self):
qs = Season.objects.annotate(before_20=LessThan(F("year"), 2000)).filter(
before_20=LessThan(F("year"), 1900),
)
self.assertCountEqual(qs, [self.s2, self.s3])
def test_filter_wrapped_lookup_lhs(self):
qs = (
Season.objects.annotate(
before_20=ExpressionWrapper(
Q(year__lt=2000),
output_field=BooleanField(),
)
)
.filter(before_20=LessThan(F("year"), 1900))
.values_list("year", flat=True)
)
self.assertCountEqual(qs, [1842, 2042])
def test_filter_exists_lhs(self):
qs = Season.objects.annotate(
before_20=Exists(
Season.objects.filter(pk=OuterRef("pk"), year__lt=2000),
)
).filter(before_20=LessThan(F("year"), 1900))
self.assertCountEqual(qs, [self.s2, self.s3])
def test_filter_subquery_lhs(self):
qs = Season.objects.annotate(
before_20=Subquery(
Season.objects.filter(pk=OuterRef("pk")).values(
lesser=LessThan(F("year"), 2000),
),
)
).filter(before_20=LessThan(F("year"), 1900))
self.assertCountEqual(qs, [self.s2, self.s3])
def test_combined_lookups_in_filter(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.filter(expression)
self.assertCountEqual(qs, [self.s1, self.s3])
def test_combined_annotated_lookups_in_filter(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression).filter(gte=True)
self.assertCountEqual(qs, [self.s1, self.s3])
def test_combined_annotated_lookups_in_filter_false(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression).filter(gte=False)
self.assertSequenceEqual(qs, [self.s2])
def test_lookup_in_order_by(self):
qs = Season.objects.order_by(LessThan(F("year"), 1910), F("year"))
self.assertSequenceEqual(qs, [self.s1, self.s3, self.s2])
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_aggregate_combined_lookup(self):
expression = Cast(GreaterThan(F("year"), 1900), models.IntegerField())
qs = Season.objects.aggregate(modern=models.Sum(expression))
self.assertEqual(qs["modern"], 2)
def test_conditional_expression(self):
qs = Season.objects.annotate(
century=Case(
When(
GreaterThan(F("year"), 1900) & LessThanOrEqual(F("year"), 2000),
then=Value("20th"),
),
default=Value("other"),
)
).values("year", "century")
self.assertCountEqual(
qs,
[
{"year": 1942, "century": "20th"},
{"year": 1842, "century": "other"},
{"year": 2042, "century": "other"},
],
)
|
db08c5b3076eccf455c0855db69d2be01a1a7d18595cd0255601307657b5a18d | """
The lookup API
This demonstrates features of the database API.
"""
from django.db import models
from django.db.models.lookups import IsNull
class Alarm(models.Model):
desc = models.CharField(max_length=100)
time = models.TimeField()
def __str__(self):
return "%s (%s)" % (self.time, self.desc)
class Author(models.Model):
name = models.CharField(max_length=100)
alias = models.CharField(max_length=50, null=True, blank=True)
class Meta:
ordering = ("name",)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
author = models.ForeignKey(Author, models.SET_NULL, blank=True, null=True)
slug = models.SlugField(unique=True, blank=True, null=True)
class Meta:
ordering = ("-pub_date", "headline")
def __str__(self):
return self.headline
class Tag(models.Model):
articles = models.ManyToManyField(Article)
name = models.CharField(max_length=100)
class Meta:
ordering = ("name",)
class NulledTextField(models.TextField):
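    # Treats the empty string as NULL at the database layer, so the lookup
    # tests can exercise Field.get_prep_value() returning None for __exact.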
def get_prep_value(self, value):
return None if value == "" else value
class NullField(models.Field):
pass
NullField.register_lookup(IsNull)
@NulledTextField.register_lookup
class NulledTransform(models.Transform):
lookup_name = "nulled"
template = "NULL"
@property
def output_field(self):
return NullField()
@NulledTextField.register_lookup
class IsNullWithNoneAsRHS(IsNull):
lookup_name = "isnull_none_rhs"
can_use_none_as_rhs = True
class Season(models.Model):
year = models.PositiveSmallIntegerField()
gt = models.IntegerField(null=True, blank=True)
nulled_text_field = NulledTextField(null=True)
class Meta:
constraints = [
models.UniqueConstraint(fields=["year"], name="season_year_unique"),
]
def __str__(self):
return str(self.year)
class Game(models.Model):
season = models.ForeignKey(Season, models.CASCADE, related_name="games")
home = models.CharField(max_length=100)
away = models.CharField(max_length=100)
class Player(models.Model):
name = models.CharField(max_length=100)
games = models.ManyToManyField(Game, related_name="players")
class Product(models.Model):
name = models.CharField(max_length=80)
qty_target = models.DecimalField(max_digits=6, decimal_places=2)
class Stock(models.Model):
product = models.ForeignKey(Product, models.CASCADE)
short = models.BooleanField(default=False)
qty_available = models.DecimalField(max_digits=6, decimal_places=2)
class Freebie(models.Model):
gift_product = models.ForeignKey(Product, models.CASCADE)
stock_id = models.IntegerField(blank=True, null=True)
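    # ForeignObject composes a virtual relation over multiple local columns:
    # (stock_id, gift_product) is joined to Stock's (id, product) without a
    # concrete database column or constraint of its own.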
stock = models.ForeignObject(
Stock,
from_fields=["stock_id", "gift_product"],
to_fields=["id", "product"],
on_delete=models.CASCADE,
)
|
eeebdf8eec89125504dae2a23334c0bba0906188ff734aab76cdd7d80275c121 | import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError,
HttpResponse,
HttpResponseNotAllowed,
HttpResponseNotModified,
HttpResponsePermanentRedirect,
HttpResponseRedirect,
JsonResponse,
QueryDict,
SimpleCookie,
StreamingHttpResponse,
parse_cookie,
)
from django.test import SimpleTestCase
from django.utils.functional import lazystr
class QueryDictTests(SimpleTestCase):
def test_create_with_no_args(self):
self.assertEqual(QueryDict(), QueryDict(""))
def test_missing_key(self):
q = QueryDict()
with self.assertRaises(KeyError):
q.__getitem__("foo")
def test_immutability(self):
q = QueryDict()
with self.assertRaises(AttributeError):
q.__setitem__("something", "bar")
with self.assertRaises(AttributeError):
q.setlist("foo", ["bar"])
with self.assertRaises(AttributeError):
q.appendlist("foo", ["bar"])
with self.assertRaises(AttributeError):
q.update({"foo": "bar"})
with self.assertRaises(AttributeError):
q.pop("foo")
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
def test_immutable_get_with_default(self):
q = QueryDict()
self.assertEqual(q.get("foo", "default"), "default")
def test_immutable_basic_operations(self):
q = QueryDict()
self.assertEqual(q.getlist("foo"), [])
self.assertNotIn("foo", q)
self.assertEqual(list(q), [])
self.assertEqual(list(q.items()), [])
self.assertEqual(list(q.lists()), [])
self.assertEqual(list(q.keys()), [])
self.assertEqual(list(q.values()), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), "")
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict("foo=bar")
self.assertEqual(q["foo"], "bar")
with self.assertRaises(KeyError):
q.__getitem__("bar")
with self.assertRaises(AttributeError):
q.__setitem__("something", "bar")
self.assertEqual(q.get("foo", "default"), "bar")
self.assertEqual(q.get("bar", "default"), "default")
self.assertEqual(q.getlist("foo"), ["bar"])
self.assertEqual(q.getlist("bar"), [])
with self.assertRaises(AttributeError):
q.setlist("foo", ["bar"])
with self.assertRaises(AttributeError):
q.appendlist("foo", ["bar"])
self.assertIn("foo", q)
self.assertNotIn("bar", q)
self.assertEqual(list(q), ["foo"])
self.assertEqual(list(q.items()), [("foo", "bar")])
self.assertEqual(list(q.lists()), [("foo", ["bar"])])
self.assertEqual(list(q.keys()), ["foo"])
self.assertEqual(list(q.values()), ["bar"])
self.assertEqual(len(q), 1)
with self.assertRaises(AttributeError):
q.update({"foo": "bar"})
with self.assertRaises(AttributeError):
q.pop("foo")
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
with self.assertRaises(AttributeError):
q.setdefault("foo", "bar")
self.assertEqual(q.urlencode(), "foo=bar")
def test_urlencode(self):
q = QueryDict(mutable=True)
q["next"] = "/a&b/"
self.assertEqual(q.urlencode(), "next=%2Fa%26b%2F")
self.assertEqual(q.urlencode(safe="/"), "next=/a%26b/")
q = QueryDict(mutable=True)
q["next"] = "/t\xebst&key/"
self.assertEqual(q.urlencode(), "next=%2Ft%C3%ABst%26key%2F")
self.assertEqual(q.urlencode(safe="/"), "next=/t%C3%ABst%26key/")
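    def test_urlencode_multivalue_sketch(self):
        # Hedged sketch, not part of the original suite: urlencode() repeats
        # the key once per value stored in the underlying list.
        q = QueryDict(mutable=True)
        q.setlist("a", ["1", "2"])
        self.assertEqual(q.urlencode(), "a=1&a=2")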
def test_urlencode_int(self):
# Normally QueryDict doesn't contain non-string values but lazily
# written tests may make that mistake.
q = QueryDict(mutable=True)
q["a"] = 1
self.assertEqual(q.urlencode(), "a=1")
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict().copy()
with self.assertRaises(KeyError):
q.__getitem__("foo")
q["name"] = "john"
self.assertEqual(q["name"], "john")
def test_mutable_delete(self):
q = QueryDict(mutable=True)
q["name"] = "john"
del q["name"]
self.assertNotIn("name", q)
def test_basic_mutable_operations(self):
q = QueryDict(mutable=True)
q["name"] = "john"
self.assertEqual(q.get("foo", "default"), "default")
self.assertEqual(q.get("name", "default"), "john")
self.assertEqual(q.getlist("name"), ["john"])
self.assertEqual(q.getlist("foo"), [])
q.setlist("foo", ["bar", "baz"])
self.assertEqual(q.get("foo", "default"), "baz")
self.assertEqual(q.getlist("foo"), ["bar", "baz"])
q.appendlist("foo", "another")
self.assertEqual(q.getlist("foo"), ["bar", "baz", "another"])
self.assertEqual(q["foo"], "another")
self.assertIn("foo", q)
self.assertCountEqual(q, ["foo", "name"])
self.assertCountEqual(q.items(), [("foo", "another"), ("name", "john")])
self.assertCountEqual(
q.lists(), [("foo", ["bar", "baz", "another"]), ("name", ["john"])]
)
self.assertCountEqual(q.keys(), ["foo", "name"])
self.assertCountEqual(q.values(), ["another", "john"])
q.update({"foo": "hello"})
self.assertEqual(q["foo"], "hello")
self.assertEqual(q.get("foo", "not available"), "hello")
self.assertEqual(q.getlist("foo"), ["bar", "baz", "another", "hello"])
self.assertEqual(q.pop("foo"), ["bar", "baz", "another", "hello"])
self.assertEqual(q.pop("foo", "not there"), "not there")
self.assertEqual(q.get("foo", "not there"), "not there")
self.assertEqual(q.setdefault("foo", "bar"), "bar")
self.assertEqual(q["foo"], "bar")
self.assertEqual(q.getlist("foo"), ["bar"])
self.assertIn(q.urlencode(), ["foo=bar&name=john", "name=john&foo=bar"])
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict("vote=yes&vote=no")
self.assertEqual(q["vote"], "no")
with self.assertRaises(AttributeError):
q.__setitem__("something", "bar")
self.assertEqual(q.get("vote", "default"), "no")
self.assertEqual(q.get("foo", "default"), "default")
self.assertEqual(q.getlist("vote"), ["yes", "no"])
self.assertEqual(q.getlist("foo"), [])
with self.assertRaises(AttributeError):
q.setlist("foo", ["bar", "baz"])
with self.assertRaises(AttributeError):
q.setlist("foo", ["bar", "baz"])
with self.assertRaises(AttributeError):
q.appendlist("foo", ["bar"])
self.assertIn("vote", q)
self.assertNotIn("foo", q)
self.assertEqual(list(q), ["vote"])
self.assertEqual(list(q.items()), [("vote", "no")])
self.assertEqual(list(q.lists()), [("vote", ["yes", "no"])])
self.assertEqual(list(q.keys()), ["vote"])
self.assertEqual(list(q.values()), ["no"])
self.assertEqual(len(q), 1)
with self.assertRaises(AttributeError):
q.update({"foo": "bar"})
with self.assertRaises(AttributeError):
q.pop("foo")
with self.assertRaises(AttributeError):
q.popitem()
with self.assertRaises(AttributeError):
q.clear()
with self.assertRaises(AttributeError):
q.setdefault("foo", "bar")
with self.assertRaises(AttributeError):
q.__delitem__("vote")
def test_pickle(self):
q = QueryDict()
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
q = QueryDict("a=b&c=d")
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
q = QueryDict("a=b&c=d&a=1")
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q, q1)
def test_update_from_querydict(self):
"""Regression test for #8278: QueryDict.update(QueryDict)"""
x = QueryDict("a=1&a=2", mutable=True)
y = QueryDict("a=3&a=4")
x.update(y)
self.assertEqual(x.getlist("a"), ["1", "2", "3", "4"])
def test_non_default_encoding(self):
"""#13572 - QueryDict with a non-default encoding"""
q = QueryDict("cur=%A4", encoding="iso-8859-15")
self.assertEqual(q.encoding, "iso-8859-15")
self.assertEqual(list(q.items()), [("cur", "€")])
self.assertEqual(q.urlencode(), "cur=%A4")
q = q.copy()
self.assertEqual(q.encoding, "iso-8859-15")
self.assertEqual(list(q.items()), [("cur", "€")])
self.assertEqual(q.urlencode(), "cur=%A4")
self.assertEqual(copy.copy(q).encoding, "iso-8859-15")
self.assertEqual(copy.deepcopy(q).encoding, "iso-8859-15")
def test_querydict_fromkeys(self):
self.assertEqual(
QueryDict.fromkeys(["key1", "key2", "key3"]), QueryDict("key1&key2&key3")
)
def test_fromkeys_with_nonempty_value(self):
self.assertEqual(
QueryDict.fromkeys(["key1", "key2", "key3"], value="val"),
QueryDict("key1=val&key2=val&key3=val"),
)
def test_fromkeys_is_immutable_by_default(self):
# Match behavior of __init__() which is also immutable by default.
q = QueryDict.fromkeys(["key1", "key2", "key3"])
with self.assertRaisesMessage(
AttributeError, "This QueryDict instance is immutable"
):
q["key4"] = "nope"
def test_fromkeys_mutable_override(self):
q = QueryDict.fromkeys(["key1", "key2", "key3"], mutable=True)
q["key4"] = "yep"
self.assertEqual(q, QueryDict("key1&key2&key3&key4=yep"))
def test_duplicates_in_fromkeys_iterable(self):
self.assertEqual(QueryDict.fromkeys("xyzzy"), QueryDict("x&y&z&z&y"))
def test_fromkeys_with_nondefault_encoding(self):
key_utf16 = b"\xff\xfe\x8e\x02\xdd\x01\x9e\x02"
value_utf16 = b"\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02"
q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding="utf-16")
expected = QueryDict("", mutable=True)
expected["ʎǝʞ"] = "ǝnlɐʌ"
self.assertEqual(q, expected)
def test_fromkeys_empty_iterable(self):
self.assertEqual(QueryDict.fromkeys([]), QueryDict(""))
def test_fromkeys_noniterable(self):
with self.assertRaises(TypeError):
QueryDict.fromkeys(0)
class HttpResponseTests(SimpleTestCase):
def test_headers_type(self):
r = HttpResponse()
# ASCII strings or bytes values are converted to strings.
r.headers["key"] = "test"
self.assertEqual(r.headers["key"], "test")
r.headers["key"] = b"test"
self.assertEqual(r.headers["key"], "test")
self.assertIn(b"test", r.serialize_headers())
# Non-ASCII values are serialized to Latin-1.
r.headers["key"] = "café"
self.assertIn("café".encode("latin-1"), r.serialize_headers())
# Other Unicode values are MIME-encoded (there's no way to pass them as
# bytes).
r.headers["key"] = "†"
self.assertEqual(r.headers["key"], "=?utf-8?b?4oCg?=")
self.assertIn(b"=?utf-8?b?4oCg?=", r.serialize_headers())
        # The response also converts string or bytes keys to strings, but
        # requires them to contain ASCII.
r = HttpResponse()
del r.headers["Content-Type"]
r.headers["foo"] = "bar"
headers = list(r.headers.items())
self.assertEqual(len(headers), 1)
self.assertEqual(headers[0], ("foo", "bar"))
r = HttpResponse()
del r.headers["Content-Type"]
r.headers[b"foo"] = "bar"
headers = list(r.headers.items())
self.assertEqual(len(headers), 1)
self.assertEqual(headers[0], ("foo", "bar"))
self.assertIsInstance(headers[0][0], str)
r = HttpResponse()
with self.assertRaises(UnicodeError):
r.headers.__setitem__("føø", "bar")
with self.assertRaises(UnicodeError):
r.headers.__setitem__("føø".encode(), "bar")
def test_long_line(self):
# Bug #20889: long lines trigger newlines to be added to headers
# (which is not allowed due to bug #10188)
h = HttpResponse()
f = b"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88"
f = f.decode("utf-8")
h.headers["Content-Disposition"] = 'attachment; filename="%s"' % f
# This one is triggering https://bugs.python.org/issue20747, that is Python
# will itself insert a newline in the header
h.headers[
"Content-Disposition"
] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'
def test_newlines_in_headers(self):
# Bug #10188: Do not allow newlines in headers (CR or LF)
r = HttpResponse()
with self.assertRaises(BadHeaderError):
r.headers.__setitem__("test\rstr", "test")
with self.assertRaises(BadHeaderError):
r.headers.__setitem__("test\nstr", "test")
def test_encoded_with_newlines_in_headers(self):
"""
Keys & values which throw a UnicodeError when encoding/decoding should
still be checked for newlines and re-raised as a BadHeaderError.
These specifically would still throw BadHeaderError after decoding
successfully, because the newlines are sandwiched in the middle of the
string and email.Header leaves those as they are.
"""
r = HttpResponse()
pairs = (
("†\nother", "test"),
("test", "†\nother"),
(b"\xe2\x80\xa0\nother", "test"),
("test", b"\xe2\x80\xa0\nother"),
)
msg = "Header values can't contain newlines"
for key, value in pairs:
with self.subTest(key=key, value=value):
with self.assertRaisesMessage(BadHeaderError, msg):
r[key] = value
def test_dict_behavior(self):
"""
Test for bug #14020: Make HttpResponse.get work like dict.get
"""
r = HttpResponse()
self.assertIsNone(r.get("test"))
def test_non_string_content(self):
# Bug 16494: HttpResponse should behave consistently with non-strings
r = HttpResponse(12345)
self.assertEqual(r.content, b"12345")
# test content via property
r = HttpResponse()
r.content = 12345
self.assertEqual(r.content, b"12345")
def test_memoryview_content(self):
r = HttpResponse(memoryview(b"memoryview"))
self.assertEqual(r.content, b"memoryview")
def test_iter_content(self):
r = HttpResponse(["abc", "def", "ghi"])
self.assertEqual(r.content, b"abcdefghi")
# test iter content via property
r = HttpResponse()
r.content = ["idan", "alex", "jacob"]
self.assertEqual(r.content, b"idanalexjacob")
r = HttpResponse()
r.content = [1, 2, 3]
self.assertEqual(r.content, b"123")
# test odd inputs
r = HttpResponse()
r.content = ["1", "2", 3, "\u079e"]
        # '\xde\x9e' == chr(1950).encode()
self.assertEqual(r.content, b"123\xde\x9e")
# .content can safely be accessed multiple times.
r = HttpResponse(iter(["hello", "world"]))
self.assertEqual(r.content, r.content)
self.assertEqual(r.content, b"helloworld")
# __iter__ can safely be called multiple times (#20187).
self.assertEqual(b"".join(r), b"helloworld")
self.assertEqual(b"".join(r), b"helloworld")
# Accessing .content still works.
self.assertEqual(r.content, b"helloworld")
# Accessing .content also works if the response was iterated first.
r = HttpResponse(iter(["hello", "world"]))
self.assertEqual(b"".join(r), b"helloworld")
self.assertEqual(r.content, b"helloworld")
# Additional content can be written to the response.
r = HttpResponse(iter(["hello", "world"]))
self.assertEqual(r.content, b"helloworld")
r.write("!")
self.assertEqual(r.content, b"helloworld!")
def test_iterator_isnt_rewound(self):
# Regression test for #13222
r = HttpResponse("abc")
i = iter(r)
self.assertEqual(list(i), [b"abc"])
self.assertEqual(list(i), [])
def test_lazy_content(self):
r = HttpResponse(lazystr("helloworld"))
self.assertEqual(r.content, b"helloworld")
def test_file_interface(self):
r = HttpResponse()
r.write(b"hello")
self.assertEqual(r.tell(), 5)
r.write("привет")
self.assertEqual(r.tell(), 17)
r = HttpResponse(["abc"])
r.write("def")
self.assertEqual(r.tell(), 6)
self.assertEqual(r.content, b"abcdef")
# with Content-Encoding header
r = HttpResponse()
r.headers["Content-Encoding"] = "winning"
r.write(b"abc")
r.write(b"def")
self.assertEqual(r.content, b"abcdef")
def test_stream_interface(self):
r = HttpResponse("asdf")
self.assertEqual(r.getvalue(), b"asdf")
r = HttpResponse()
self.assertIs(r.writable(), True)
r.writelines(["foo\n", "bar\n", "baz\n"])
self.assertEqual(r.content, b"foo\nbar\nbaz\n")
def test_unsafe_redirect(self):
bad_urls = [
'data:text/html,<script>window.alert("xss")</script>',
"mailto:[email protected]",
"file:///etc/passwd",
]
for url in bad_urls:
with self.assertRaises(DisallowedRedirect):
HttpResponseRedirect(url)
with self.assertRaises(DisallowedRedirect):
HttpResponsePermanentRedirect(url)
def test_header_deletion(self):
r = HttpResponse("hello")
r.headers["X-Foo"] = "foo"
del r.headers["X-Foo"]
self.assertNotIn("X-Foo", r.headers)
# del doesn't raise a KeyError on nonexistent headers.
del r.headers["X-Foo"]
def test_instantiate_with_headers(self):
r = HttpResponse("hello", headers={"X-Foo": "foo"})
self.assertEqual(r.headers["X-Foo"], "foo")
self.assertEqual(r.headers["x-foo"], "foo")
def test_content_type(self):
r = HttpResponse("hello", content_type="application/json")
self.assertEqual(r.headers["Content-Type"], "application/json")
def test_content_type_headers(self):
r = HttpResponse("hello", headers={"Content-Type": "application/json"})
self.assertEqual(r.headers["Content-Type"], "application/json")
def test_content_type_mutually_exclusive(self):
msg = (
"'headers' must not contain 'Content-Type' when the "
"'content_type' parameter is provided."
)
with self.assertRaisesMessage(ValueError, msg):
HttpResponse(
"hello",
content_type="application/json",
headers={"Content-Type": "text/csv"},
)
class HttpResponseSubclassesTests(SimpleTestCase):
def test_redirect(self):
response = HttpResponseRedirect("/redirected/")
self.assertEqual(response.status_code, 302)
# Standard HttpResponse init args can be used
response = HttpResponseRedirect(
"/redirected/",
content="The resource has temporarily moved",
)
self.assertContains(
response, "The resource has temporarily moved", status_code=302
)
self.assertEqual(response.url, response.headers["Location"])
def test_redirect_lazy(self):
"""Make sure HttpResponseRedirect works with lazy strings."""
r = HttpResponseRedirect(lazystr("/redirected/"))
self.assertEqual(r.url, "/redirected/")
def test_redirect_repr(self):
response = HttpResponseRedirect("/redirected/")
expected = (
'<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", '
'url="/redirected/">'
)
self.assertEqual(repr(response), expected)
def test_invalid_redirect_repr(self):
"""
If HttpResponseRedirect raises DisallowedRedirect, its __repr__()
should work (in the debug view, for example).
"""
response = HttpResponseRedirect.__new__(HttpResponseRedirect)
with self.assertRaisesMessage(
DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'"
):
HttpResponseRedirect.__init__(response, "ssh://foo")
expected = (
'<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", '
'url="ssh://foo">'
)
self.assertEqual(repr(response), expected)
def test_not_modified(self):
response = HttpResponseNotModified()
self.assertEqual(response.status_code, 304)
# 304 responses should not have content/content-type
with self.assertRaises(AttributeError):
response.content = "Hello dear"
self.assertNotIn("content-type", response)
def test_not_modified_repr(self):
response = HttpResponseNotModified()
self.assertEqual(repr(response), "<HttpResponseNotModified status_code=304>")
def test_not_allowed(self):
response = HttpResponseNotAllowed(["GET"])
self.assertEqual(response.status_code, 405)
# Standard HttpResponse init args can be used
response = HttpResponseNotAllowed(
["GET"], content="Only the GET method is allowed"
)
self.assertContains(response, "Only the GET method is allowed", status_code=405)
def test_not_allowed_repr(self):
response = HttpResponseNotAllowed(["GET", "OPTIONS"], content_type="text/plain")
expected = (
'<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
)
self.assertEqual(repr(response), expected)
def test_not_allowed_repr_no_content_type(self):
response = HttpResponseNotAllowed(("GET", "POST"))
del response.headers["Content-Type"]
self.assertEqual(
repr(response), "<HttpResponseNotAllowed [GET, POST] status_code=405>"
)
class JsonResponseTests(SimpleTestCase):
def test_json_response_non_ascii(self):
data = {"key": "łóżko"}
response = JsonResponse(data)
self.assertEqual(json.loads(response.content.decode()), data)
def test_json_response_raises_type_error_with_default_setting(self):
with self.assertRaisesMessage(
TypeError,
"In order to allow non-dict objects to be serialized set the "
"safe parameter to False",
):
JsonResponse([1, 2, 3])
def test_json_response_text(self):
response = JsonResponse("foobar", safe=False)
self.assertEqual(json.loads(response.content.decode()), "foobar")
def test_json_response_list(self):
response = JsonResponse(["foo", "bar"], safe=False)
self.assertEqual(json.loads(response.content.decode()), ["foo", "bar"])
def test_json_response_uuid(self):
u = uuid.uuid4()
response = JsonResponse(u, safe=False)
self.assertEqual(json.loads(response.content.decode()), str(u))
def test_json_response_custom_encoder(self):
class CustomDjangoJSONEncoder(DjangoJSONEncoder):
def encode(self, o):
return json.dumps({"foo": "bar"})
response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
self.assertEqual(json.loads(response.content.decode()), {"foo": "bar"})
def test_json_response_passing_arguments_to_json_dumps(self):
response = JsonResponse({"foo": "bar"}, json_dumps_params={"indent": 2})
self.assertEqual(response.content.decode(), '{\n "foo": "bar"\n}')
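    def test_json_response_default_content_type_sketch(self):
        # Hedged sketch, not part of the original suite: JsonResponse sets
        # its Content-Type header to application/json by default.
        response = JsonResponse({"foo": "bar"})
        self.assertEqual(response.headers["Content-Type"], "application/json")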
class StreamingHttpResponseTests(SimpleTestCase):
def test_streaming_response(self):
r = StreamingHttpResponse(iter(["hello", "world"]))
# iterating over the response itself yields bytestring chunks.
chunks = list(r)
self.assertEqual(chunks, [b"hello", b"world"])
for chunk in chunks:
self.assertIsInstance(chunk, bytes)
# and the response can only be iterated once.
self.assertEqual(list(r), [])
# even when a sequence that can be iterated many times, like a list,
# is given as content.
r = StreamingHttpResponse(["abc", "def"])
self.assertEqual(list(r), [b"abc", b"def"])
self.assertEqual(list(r), [])
# iterating over strings still yields bytestring chunks.
r.streaming_content = iter(["hello", "café"])
chunks = list(r)
        # '\xc3\xa9' == chr(233).encode()
self.assertEqual(chunks, [b"hello", b"caf\xc3\xa9"])
for chunk in chunks:
self.assertIsInstance(chunk, bytes)
# streaming responses don't have a `content` attribute.
self.assertFalse(hasattr(r, "content"))
# and you can't accidentally assign to a `content` attribute.
with self.assertRaises(AttributeError):
r.content = "xyz"
# but they do have a `streaming_content` attribute.
self.assertTrue(hasattr(r, "streaming_content"))
# that exists so we can check if a response is streaming, and wrap or
# replace the content iterator.
r.streaming_content = iter(["abc", "def"])
r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
self.assertEqual(list(r), [b"ABC", b"DEF"])
# coercing a streaming response to bytes doesn't return a complete HTTP
# message like a regular response does. it only gives us the headers.
r = StreamingHttpResponse(iter(["hello", "world"]))
self.assertEqual(bytes(r), b"Content-Type: text/html; charset=utf-8")
# and this won't consume its content.
self.assertEqual(list(r), [b"hello", b"world"])
# additional content cannot be written to the response.
r = StreamingHttpResponse(iter(["hello", "world"]))
with self.assertRaises(Exception):
r.write("!")
# and we can't tell the current position.
with self.assertRaises(Exception):
r.tell()
r = StreamingHttpResponse(iter(["hello", "world"]))
self.assertEqual(r.getvalue(), b"helloworld")
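    def test_streaming_flag_sketch(self):
        # Hedged sketch, not part of the original suite: middleware can tell
        # streaming and regular responses apart via the `streaming` flag.
        self.assertIs(StreamingHttpResponse(iter([])).streaming, True)
        self.assertIs(HttpResponse().streaming, False)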
def test_repr(self):
r = StreamingHttpResponse(iter(["hello", "café"]))
self.assertEqual(
repr(r),
'<StreamingHttpResponse status_code=200, "text/html; charset=utf-8">',
)
async def test_async_streaming_response(self):
async def async_iter():
yield b"hello"
yield b"world"
r = StreamingHttpResponse(async_iter())
chunks = []
async for chunk in r:
chunks.append(chunk)
self.assertEqual(chunks, [b"hello", b"world"])
def test_async_streaming_response_warning(self):
async def async_iter():
yield b"hello"
yield b"world"
r = StreamingHttpResponse(async_iter())
msg = (
"StreamingHttpResponse must consume asynchronous iterators in order to "
"serve them synchronously. Use a synchronous iterator instead."
)
with self.assertWarnsMessage(Warning, msg):
self.assertEqual(list(r), [b"hello", b"world"])
async def test_sync_streaming_response_warning(self):
r = StreamingHttpResponse(iter(["hello", "world"]))
msg = (
"StreamingHttpResponse must consume synchronous iterators in order to "
"serve them asynchronously. Use an asynchronous iterator instead."
)
with self.assertWarnsMessage(Warning, msg):
self.assertEqual(b"hello", await r.__aiter__().__anext__())
class FileCloseTests(SimpleTestCase):
def setUp(self):
# Disable the request_finished signal during this test
# to avoid interfering with the database connection.
request_finished.disconnect(close_old_connections)
def tearDown(self):
request_finished.connect(close_old_connections)
def test_response(self):
filename = os.path.join(os.path.dirname(__file__), "abc.txt")
        # HttpResponse reads the file's content eagerly, so the file is
        # closed as soon as the response is constructed.
file1 = open(filename)
r = HttpResponse(file1)
self.assertTrue(file1.closed)
r.close()
        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
file1 = open(filename)
file2 = open(filename)
r = HttpResponse(file1)
r.content = file2
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
def test_streaming_response(self):
filename = os.path.join(os.path.dirname(__file__), "abc.txt")
# file isn't closed until we close the response.
file1 = open(filename)
r = StreamingHttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
file1 = open(filename)
file2 = open(filename)
r = StreamingHttpResponse(file1)
r.streaming_content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
def test_encode(self):
"""Semicolons and commas are encoded."""
c = SimpleCookie()
c["test"] = "An,awkward;value"
self.assertNotIn(";", c.output().rstrip(";")) # IE compat
self.assertNotIn(",", c.output().rstrip(";")) # Safari compat
def test_decode(self):
"""Semicolons and commas are decoded."""
c = SimpleCookie()
c["test"] = "An,awkward;value"
c2 = SimpleCookie()
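        # c.output() is prefixed with "Set-Cookie: " (12 characters), so the
        # slices below strip the header name to leave the raw cookie string.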
c2.load(c.output()[12:])
self.assertEqual(c["test"].value, c2["test"].value)
c3 = parse_cookie(c.output()[12:])
self.assertEqual(c["test"].value, c3["test"])
def test_nonstandard_keys(self):
"""
A single non-standard cookie name doesn't affect all cookies (#13007).
"""
self.assertIn("good_cookie", parse_cookie("good_cookie=yes;bad:cookie=yes"))
def test_repeated_nonstandard_keys(self):
"""
A repeated non-standard name doesn't affect all cookies (#15852).
"""
self.assertIn("good_cookie", parse_cookie("a:=b; a:=c; good_cookie=yes"))
def test_python_cookies(self):
"""
Test cases copied from Python's Lib/test/test_http_cookies.py
"""
self.assertEqual(
parse_cookie("chips=ahoy; vienna=finger"),
{"chips": "ahoy", "vienna": "finger"},
)
# Here parse_cookie() differs from Python's cookie parsing in that it
# treats all semicolons as delimiters, even within quotes.
self.assertEqual(
parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
{"keebler": '"E=mc2', "L": '\\"Loves\\"', "fudge": "\\012", "": '"'},
)
# Illegal cookies that have an '=' char in an unquoted value.
self.assertEqual(parse_cookie("keebler=E=mc2"), {"keebler": "E=mc2"})
# Cookies with ':' character in their name.
self.assertEqual(
parse_cookie("key:term=value:term"), {"key:term": "value:term"}
)
# Cookies with '[' and ']'.
self.assertEqual(
parse_cookie("a=b; c=[; d=r; f=h"), {"a": "b", "c": "[", "d": "r", "f": "h"}
)
def test_cookie_edgecases(self):
# Cookies that RFC 6265 allows.
self.assertEqual(
parse_cookie("a=b; Domain=example.com"), {"a": "b", "Domain": "example.com"}
)
# parse_cookie() has historically kept only the last cookie with the
# same name.
self.assertEqual(parse_cookie("a=b; h=i; a=c"), {"a": "c", "h": "i"})
def test_invalid_cookies(self):
"""
Cookie strings that go against RFC 6265 but browsers will send if set
via document.cookie.
"""
# Chunks without an equals sign appear as unnamed values per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
self.assertIn(
"django_language", parse_cookie("abc=def; unnamed; django_language=en")
)
# Even a double quote may be an unnamed value.
self.assertEqual(parse_cookie('a=b; "; c=d'), {"a": "b", "": '"', "c": "d"})
# Spaces in names and values, and an equals sign in values.
self.assertEqual(
parse_cookie("a b c=d e = f; gh=i"), {"a b c": "d e = f", "gh": "i"}
)
# More characters the spec forbids.
self.assertEqual(
parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'),
{"a b,c<>@:/[]?{}": 'd " =e,f g'},
)
# Unicode characters. The spec only allows ASCII.
self.assertEqual(
parse_cookie("saint=André Bessette"), {"saint": "André Bessette"}
)
# Browsers don't send extra whitespace or semicolons in Cookie headers,
# but parse_cookie() should parse whitespace the same way
# document.cookie parses whitespace.
self.assertEqual(
parse_cookie(" = b ; ; = ; c = ; "), {"": "b", "c": ""}
)
def test_samesite(self):
c = SimpleCookie("name=value; samesite=lax; httponly")
self.assertEqual(c["name"]["samesite"], "lax")
self.assertIn("SameSite=lax", c.output())
def test_httponly_after_load(self):
c = SimpleCookie()
c.load("name=val")
c["name"]["httponly"] = True
self.assertTrue(c["name"]["httponly"])
def test_load_dict(self):
c = SimpleCookie()
c.load({"name": "val"})
self.assertEqual(c["name"].value, "val")
def test_pickle(self):
rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
expected_output = "Set-Cookie: %s" % rawdata
C = SimpleCookie()
C.load(rawdata)
self.assertEqual(C.output(), expected_output)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
C1 = pickle.loads(pickle.dumps(C, protocol=proto))
self.assertEqual(C1.output(), expected_output)
class HttpResponseHeadersTestCase(SimpleTestCase):
"""Headers by treating HttpResponse like a dictionary."""
def test_headers(self):
response = HttpResponse()
response["X-Foo"] = "bar"
self.assertEqual(response["X-Foo"], "bar")
self.assertEqual(response.headers["X-Foo"], "bar")
self.assertIn("X-Foo", response)
self.assertIs(response.has_header("X-Foo"), True)
del response["X-Foo"]
self.assertNotIn("X-Foo", response)
self.assertNotIn("X-Foo", response.headers)
# del doesn't raise a KeyError on nonexistent headers.
del response["X-Foo"]
def test_headers_as_iterable_of_tuple_pairs(self):
response = HttpResponse(headers=(("X-Foo", "bar"),))
self.assertEqual(response["X-Foo"], "bar")
def test_headers_bytestring(self):
response = HttpResponse()
response["X-Foo"] = b"bar"
self.assertEqual(response["X-Foo"], "bar")
self.assertEqual(response.headers["X-Foo"], "bar")
def test_newlines_in_headers(self):
response = HttpResponse()
with self.assertRaises(BadHeaderError):
response["test\rstr"] = "test"
with self.assertRaises(BadHeaderError):
response["test\nstr"] = "test"
|
f96c4993cf0eb159fd2b4efa192e9dad263c0edd1a31914786d09f8f5e3758cf | import asyncio
import os
from unittest import mock
from asgiref.sync import async_to_sync, iscoroutinefunction
from django.core.cache import DEFAULT_CACHE_ALIAS, caches
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.http import HttpResponse, HttpResponseNotAllowed
from django.test import RequestFactory, SimpleTestCase
from django.utils.asyncio import async_unsafe
from django.views.generic.base import View
from .models import SimpleModel
class CacheTest(SimpleTestCase):
def test_caches_local(self):
@async_to_sync
async def async_cache():
return caches[DEFAULT_CACHE_ALIAS]
cache_1 = async_cache()
cache_2 = async_cache()
self.assertIs(cache_1, cache_2)
class DatabaseConnectionTest(SimpleTestCase):
"""A database connection cannot be used in an async context."""
async def test_get_async_connection(self):
with self.assertRaises(SynchronousOnlyOperation):
list(SimpleModel.objects.all())
class AsyncUnsafeTest(SimpleTestCase):
"""
    The async_unsafe decorator should work correctly and return the correct
    message.
"""
@async_unsafe
def dangerous_method(self):
return True
async def test_async_unsafe(self):
# async_unsafe decorator catches bad access and returns the right
# message.
msg = (
"You cannot call this from an async context - use a thread or "
"sync_to_async."
)
with self.assertRaisesMessage(SynchronousOnlyOperation, msg):
self.dangerous_method()
@mock.patch.dict(os.environ, {"DJANGO_ALLOW_ASYNC_UNSAFE": "true"})
@async_to_sync # mock.patch() is not async-aware.
async def test_async_unsafe_suppressed(self):
# Decorator doesn't trigger check when the environment variable to
# suppress it is set.
try:
self.dangerous_method()
except SynchronousOnlyOperation:
self.fail("SynchronousOnlyOperation should not be raised.")
class SyncView(View):
def get(self, request, *args, **kwargs):
return HttpResponse("Hello (sync) world!")
class AsyncView(View):
async def get(self, request, *args, **kwargs):
return HttpResponse("Hello (async) world!")
class ViewTests(SimpleTestCase):
def test_views_are_correctly_marked(self):
tests = [
(SyncView, False),
(AsyncView, True),
]
for view_cls, is_async in tests:
with self.subTest(view_cls=view_cls, is_async=is_async):
self.assertIs(view_cls.view_is_async, is_async)
callback = view_cls.as_view()
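                # as_view() marks the returned callback as a coroutine
                # function when all of the view's handlers are async.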
self.assertIs(iscoroutinefunction(callback), is_async)
def test_mixed_views_raise_error(self):
class MixedView(View):
def get(self, request, *args, **kwargs):
return HttpResponse("Hello (mixed) world!")
async def post(self, request, *args, **kwargs):
return HttpResponse("Hello (mixed) world!")
msg = (
f"{MixedView.__qualname__} HTTP handlers must either be all sync or all "
"async."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
MixedView.as_view()
def test_options_handler_responds_correctly(self):
tests = [
(SyncView, False),
(AsyncView, True),
]
for view_cls, is_coroutine in tests:
with self.subTest(view_cls=view_cls, is_coroutine=is_coroutine):
instance = view_cls()
response = instance.options(None)
self.assertIs(
asyncio.iscoroutine(response),
is_coroutine,
)
if is_coroutine:
response = asyncio.run(response)
self.assertIsInstance(response, HttpResponse)
def test_http_method_not_allowed_responds_correctly(self):
request_factory = RequestFactory()
tests = [
(SyncView, False),
(AsyncView, True),
]
for view_cls, is_coroutine in tests:
with self.subTest(view_cls=view_cls, is_coroutine=is_coroutine):
instance = view_cls()
response = instance.http_method_not_allowed(request_factory.post("/"))
self.assertIs(
asyncio.iscoroutine(response),
is_coroutine,
)
if is_coroutine:
response = asyncio.run(response)
self.assertIsInstance(response, HttpResponseNotAllowed)
def test_base_view_class_is_sync(self):
"""
View and by extension any subclasses that don't define handlers are
sync.
"""
self.assertIs(View.view_is_async, False)
|
913f38ca8a1f4a233dee92ee5851eb23fc6a91b82419e92159dc227b6e24ff2b | import mimetypes
import unittest
from os import path
from urllib.parse import quote
from django.conf.urls.static import static
from django.core.exceptions import ImproperlyConfigured
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
class StaticTests(SimpleTestCase):
"""Tests django views in django/views/static.py"""
prefix = "site_media"
def test_serve(self):
"The static view can serve static media"
media_files = ["file.txt", "file.txt.gz", "%2F.txt"]
for filename in media_files:
response = self.client.get("/%s/%s" % (self.prefix, quote(filename)))
response_content = b"".join(response)
file_path = path.join(media_dir, filename)
with open(file_path, "rb") as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(
len(response_content), int(response.headers["Content-Length"])
)
self.assertEqual(
mimetypes.guess_type(file_path)[1],
response.get("Content-Encoding", None),
)
def test_chunked(self):
"The static view should stream files in chunks to avoid large memory usage"
response = self.client.get("/%s/%s" % (self.prefix, "long-line.txt"))
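        # FileResponse reads files in block_size chunks (4096 bytes by
        # default), so the first chunk is exactly one block.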
first_chunk = next(response.streaming_content)
self.assertEqual(len(first_chunk), FileResponse.block_size)
second_chunk = next(response.streaming_content)
response.close()
# strip() to prevent OS line endings from causing differences
self.assertEqual(len(second_chunk.strip()), 1449)
def test_unknown_mime_type(self):
response = self.client.get("/%s/file.unknown" % self.prefix)
self.assertEqual("application/octet-stream", response.headers["Content-Type"])
response.close()
def test_copes_with_empty_path_component(self):
file_name = "file.txt"
response = self.client.get("/%s//%s" % (self.prefix, file_name))
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
def test_is_modified_since(self):
file_name = "file.txt"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={"if-modified-since": "Thu, 1 Jan 1970 00:00:00 GMT"},
)
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
def test_not_modified_since(self):
file_name = "file.txt"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={
# This is 24h before max Unix time. Remember to fix Django and
# update this test well before 2038 :)
"if-modified-since": "Mon, 18 Jan 2038 05:14:07 GMT"
},
)
self.assertIsInstance(response, HttpResponseNotModified)
def test_invalid_if_modified_since(self):
"""Handle bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
9110 Section 13.1.3.
"""
file_name = "file.txt"
invalid_date = "Mon, 28 May 999999999999 28:25:26 GMT"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={"if-modified-since": invalid_date},
)
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
        self.assertEqual(
            len(response_content), int(response.headers["Content-Length"])
        )
def test_invalid_if_modified_since2(self):
"""Handle even more bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
9110 Section 13.1.3.
"""
file_name = "file.txt"
invalid_date = ": 1291108438, Wed, 20 Oct 2010 14:05:00 GMT"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={"if-modified-since": invalid_date},
)
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
        self.assertEqual(
            len(response_content), int(response.headers["Content-Length"])
        )
def test_404(self):
response = self.client.get("/%s/nonexistent_resource" % self.prefix)
self.assertEqual(404, response.status_code)
def test_index(self):
response = self.client.get("/%s/" % self.prefix)
self.assertContains(response, "Index of ./")
# Directories have a trailing slash.
self.assertIn("subdir/", response.context["file_list"])
def test_index_subdir(self):
response = self.client.get("/%s/subdir/" % self.prefix)
self.assertContains(response, "Index of subdir/")
        # Files with a leading dot (e.g. .hidden) aren't displayed.
self.assertEqual(response.context["file_list"], ["visible"])
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"loaders": [
(
"django.template.loaders.locmem.Loader",
{
"static/directory_index.html": "Test index",
},
),
],
},
}
]
)
def test_index_custom_template(self):
response = self.client.get("/%s/" % self.prefix)
self.assertEqual(response.content, b"Test index")
class StaticHelperTest(StaticTests):
"""
Test case to make sure the static URL pattern helper works as expected
"""
def setUp(self):
super().setUp()
self._old_views_urlpatterns = urls.urlpatterns[:]
urls.urlpatterns += static("media/", document_root=media_dir)
def tearDown(self):
super().tearDown()
urls.urlpatterns = self._old_views_urlpatterns
def test_prefix(self):
self.assertEqual(static("test")[0].pattern.regex.pattern, "^test(?P<path>.*)$")
@override_settings(DEBUG=False)
def test_debug_off(self):
"""No URLs are served if DEBUG=False."""
self.assertEqual(static("test"), [])
def test_empty_prefix(self):
with self.assertRaisesMessage(
ImproperlyConfigured, "Empty static prefix not permitted"
):
static("")
def test_special_prefix(self):
"""No URLs are served if prefix contains a netloc part."""
self.assertEqual(static("http://example.org"), [])
self.assertEqual(static("//example.org"), [])
class StaticUtilsTests(unittest.TestCase):
def test_was_modified_since_fp(self):
"""
A floating point mtime does not disturb was_modified_since (#18675).
"""
mtime = 1343416141.107817
header = http_date(mtime)
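        # http_date() truncates the mtime to whole seconds, so the sub-second
        # remainder must not be treated as a modification.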
self.assertFalse(was_modified_since(header, mtime))
def test_was_modified_since_empty_string(self):
self.assertTrue(was_modified_since(header="", mtime=1))
|
7bcfe50de4933e89102e0c485908d9cba8a95533c4a31a0b1630a2363aa11c13 | from django.template import TemplateDoesNotExist
from django.test import Client, RequestFactory, SimpleTestCase, override_settings
from django.utils.translation import override
from django.views.csrf import CSRF_FAILURE_TEMPLATE_NAME, csrf_failure
@override_settings(ROOT_URLCONF="view_tests.urls")
class CsrfViewTests(SimpleTestCase):
def setUp(self):
super().setUp()
self.client = Client(enforce_csrf_checks=True)
@override_settings(
USE_I18N=True,
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
],
)
def test_translation(self):
"""An invalid request is rejected with a localized error message."""
response = self.client.post("/")
self.assertContains(response, "Forbidden", status_code=403)
self.assertContains(
response, "CSRF verification failed. Request aborted.", status_code=403
)
with self.settings(LANGUAGE_CODE="nl"), override("en-us"):
response = self.client.post("/")
self.assertContains(response, "Verboden", status_code=403)
self.assertContains(
response,
"CSRF-verificatie mislukt. Verzoek afgebroken.",
status_code=403,
)
@override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
def test_no_referer(self):
"""
Referer header is strictly checked for POST over HTTPS. Trigger the
exception by sending an incorrect referer.
"""
response = self.client.post("/", headers={"x-forwarded-proto": "https"})
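        # With SECURE_PROXY_SSL_HEADER configured, the X-Forwarded-Proto
        # header makes the request count as HTTPS, which enables the strict
        # referer check.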
self.assertContains(
response,
"You are seeing this message because this HTTPS site requires a "
"“Referer header” to be sent by your web browser, but "
"none was sent.",
status_code=403,
)
self.assertContains(
response,
"If you have configured your browser to disable “Referer” "
"headers, please re-enable them, at least for this site, or for "
"HTTPS connections, or for “same-origin” requests.",
status_code=403,
)
self.assertContains(
response,
"If you are using the <meta name="referrer" "
"content="no-referrer"> tag or including the "
"“Referrer-Policy: no-referrer” header, please remove them.",
status_code=403,
)
def test_no_cookies(self):
"""
The CSRF cookie is checked for POST. Failure to send this cookie should
provide a nice error message.
"""
response = self.client.post("/")
self.assertContains(
response,
"You are seeing this message because this site requires a CSRF "
"cookie when submitting forms. This cookie is required for "
"security reasons, to ensure that your browser is not being "
"hijacked by third parties.",
status_code=403,
)
@override_settings(TEMPLATES=[])
def test_no_django_template_engine(self):
"""
The CSRF view doesn't depend on the TEMPLATES configuration (#24388).
"""
response = self.client.post("/")
self.assertContains(response, "Forbidden", status_code=403)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"loaders": [
(
"django.template.loaders.locmem.Loader",
{
CSRF_FAILURE_TEMPLATE_NAME: (
"Test template for CSRF failure"
)
},
),
],
},
}
]
)
def test_custom_template(self):
"""A custom CSRF_FAILURE_TEMPLATE_NAME is used."""
response = self.client.post("/")
self.assertContains(response, "Test template for CSRF failure", status_code=403)
def test_custom_template_does_not_exist(self):
"""An exception is raised if a nonexistent template is supplied."""
factory = RequestFactory()
request = factory.post("/")
with self.assertRaises(TemplateDoesNotExist):
csrf_failure(request, template_name="nonexistent.html")
|
f320fc322164c3c2ccf2abb33172b4bb111e0be25a40748829a468e0e7fbc1ed | import gettext
import json
from os import path
from django.conf import settings
from django.test import (
RequestFactory,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.test.selenium import SeleniumTestCase
from django.urls import reverse
from django.utils.translation import get_language, override
from django.views.i18n import JavaScriptCatalog, get_formats
from ..urls import locale_dir
@override_settings(ROOT_URLCONF="view_tests.urls")
class SetLanguageTests(TestCase):
"""Test the django.views.i18n.set_language view."""
def _get_inactive_language_code(self):
"""Return language code for a language which is not activated."""
current_language = get_language()
return [code for code, name in settings.LANGUAGES if code != current_language][
0
]
def test_setlang(self):
"""
The set_language view can be used to change the session language.
The user is redirected to the 'next' argument if provided.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "/"}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"referer": "/i_should_not_be_used/"}
)
self.assertRedirects(response, "/")
# The language is set in a cookie.
language_cookie = self.client.cookies[settings.LANGUAGE_COOKIE_NAME]
self.assertEqual(language_cookie.value, lang_code)
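        # SimpleCookie reports attributes that were never set as empty
        # strings, hence the comparisons below.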
self.assertEqual(language_cookie["domain"], "")
self.assertEqual(language_cookie["path"], "/")
self.assertEqual(language_cookie["max-age"], "")
self.assertEqual(language_cookie["httponly"], "")
self.assertEqual(language_cookie["samesite"], "")
self.assertEqual(language_cookie["secure"], "")
def test_setlang_unsafe_next(self):
"""
The set_language view only redirects to the 'next' argument if it is
"safe".
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "//unsafe/redirection/"}
response = self.client.post("/i18n/setlang/", data=post_data)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_http_next(self):
"""
The set_language view only redirects to the 'next' argument if it is
"safe" and its scheme is HTTPS if the request was sent over HTTPS.
"""
lang_code = self._get_inactive_language_code()
non_https_next_url = "http://testserver/redirection/"
post_data = {"language": lang_code, "next": non_https_next_url}
# Insecure URL in POST data.
response = self.client.post("/i18n/setlang/", data=post_data, secure=True)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
# Insecure URL in HTTP referer.
response = self.client.post(
"/i18n/setlang/", secure=True, headers={"referer": non_https_next_url}
)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_redirect_to_referer(self):
"""
The set_language view redirects to the URL in the referer header when
there isn't a "next" parameter.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"referer": "/i18n/"}
)
self.assertRedirects(response, "/i18n/", fetch_redirect_response=False)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_default_redirect(self):
"""
The set_language view redirects to '/' when there isn't a referer or
"next" parameter.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
response = self.client.post("/i18n/setlang/", post_data)
self.assertRedirects(response, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_performs_redirect_for_ajax_if_explicitly_requested(self):
"""
The set_language view redirects to the "next" parameter for requests
not accepting HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "/"}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"accept": "application/json"}
)
self.assertRedirects(response, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_doesnt_perform_a_redirect_to_referer_for_ajax(self):
"""
The set_language view doesn't redirect to the HTTP referer header if
the request doesn't accept HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
headers = {"HTTP_REFERER": "/", "HTTP_ACCEPT": "application/json"}
response = self.client.post("/i18n/setlang/", post_data, **headers)
self.assertEqual(response.status_code, 204)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_doesnt_perform_a_default_redirect_for_ajax(self):
"""
The set_language view returns 204 by default for requests not accepting
HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"accept": "application/json"}
)
self.assertEqual(response.status_code, 204)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_unsafe_next_for_ajax(self):
"""
The fallback to root URL for the set_language view works for requests
not accepting HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "//unsafe/redirection/"}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"accept": "application/json"}
)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_reversal(self):
self.assertEqual(reverse("set_language"), "/i18n/setlang/")
def test_setlang_cookie(self):
        # We force saving the language to a cookie rather than a session
        # by excluding session middleware and those which do require it.
test_settings = {
"MIDDLEWARE": ["django.middleware.common.CommonMiddleware"],
"LANGUAGE_COOKIE_NAME": "mylanguage",
"LANGUAGE_COOKIE_AGE": 3600 * 7 * 2,
"LANGUAGE_COOKIE_DOMAIN": ".example.com",
"LANGUAGE_COOKIE_PATH": "/test/",
"LANGUAGE_COOKIE_HTTPONLY": True,
"LANGUAGE_COOKIE_SAMESITE": "Strict",
"LANGUAGE_COOKIE_SECURE": True,
}
with self.settings(**test_settings):
post_data = {"language": "pl", "next": "/views/"}
response = self.client.post("/i18n/setlang/", data=post_data)
language_cookie = response.cookies.get("mylanguage")
self.assertEqual(language_cookie.value, "pl")
self.assertEqual(language_cookie["domain"], ".example.com")
self.assertEqual(language_cookie["path"], "/test/")
self.assertEqual(language_cookie["max-age"], 3600 * 7 * 2)
self.assertIs(language_cookie["httponly"], True)
self.assertEqual(language_cookie["samesite"], "Strict")
self.assertIs(language_cookie["secure"], True)
def test_setlang_decodes_http_referer_url(self):
"""
The set_language view decodes the HTTP_REFERER URL and preserves an
encoded query string.
"""
# The URL & view must exist for this to work as a regression test.
self.assertEqual(
reverse("with_parameter", kwargs={"parameter": "x"}), "/test-setlang/x/"
)
lang_code = self._get_inactive_language_code()
# %C3%A4 decodes to ä, %26 to &.
encoded_url = "/test-setlang/%C3%A4/?foo=bar&baz=alpha%26omega"
response = self.client.post(
"/i18n/setlang/", {"language": lang_code}, headers={"referer": encoded_url}
)
self.assertRedirects(response, encoded_url, fetch_redirect_response=False)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
@modify_settings(
MIDDLEWARE={
"append": "django.middleware.locale.LocaleMiddleware",
}
)
def test_lang_from_translated_i18n_pattern(self):
response = self.client.post(
"/i18n/setlang/",
data={"language": "nl"},
follow=True,
headers={"referer": "/en/translated/"},
)
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "nl")
self.assertRedirects(response, "/nl/vertaald/")
# And reverse
response = self.client.post(
"/i18n/setlang/",
data={"language": "en"},
follow=True,
headers={"referer": "/nl/vertaald/"},
)
self.assertRedirects(response, "/en/translated/")
@override_settings(ROOT_URLCONF="view_tests.urls")
class I18NViewTests(SimpleTestCase):
"""Test django.views.i18n views other than set_language."""
@override_settings(LANGUAGE_CODE="de")
def test_get_formats(self):
formats = get_formats()
# Test 3 possible types in get_formats: integer, string, and list.
self.assertEqual(formats["FIRST_DAY_OF_WEEK"], 1)
self.assertEqual(formats["DECIMAL_SEPARATOR"], ",")
self.assertEqual(
formats["TIME_INPUT_FORMATS"], ["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"]
)
def test_jsi18n(self):
"""The javascript_catalog can be deployed with language settings"""
for lang_code in ["es", "fr", "ru"]:
with override(lang_code):
catalog = gettext.translation("djangojs", locale_dir, [lang_code])
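                # Load the expected string straight from the gettext catalog
                # so the view's output can be checked against it.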
trans_txt = catalog.gettext("this is to be translated")
response = self.client.get("/jsi18n/")
self.assertEqual(
response.headers["Content-Type"], 'text/javascript; charset="utf-8"'
)
# response content must include a line like:
# "this is to be translated": <value of trans_txt Python variable>
# json.dumps() is used to be able to check Unicode strings.
self.assertContains(response, json.dumps(trans_txt), 1)
if lang_code == "fr":
# Message with context (msgctxt)
self.assertContains(response, '"month name\\u0004May": "mai"', 1)
@override_settings(USE_I18N=False)
def test_jsi18n_USE_I18N_False(self):
response = self.client.get("/jsi18n/")
# default plural function
self.assertContains(
response,
"django.pluralidx = function(count) { return (count == 1) ? 0 : 1; };",
)
self.assertNotContains(response, "var newcatalog =")
def test_jsoni18n(self):
"""
The json_catalog returns the language catalog and settings as JSON.
"""
with override("de"):
response = self.client.get("/jsoni18n/")
data = json.loads(response.content.decode())
self.assertIn("catalog", data)
self.assertIn("formats", data)
self.assertEqual(
data["formats"]["TIME_INPUT_FORMATS"],
["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"],
)
self.assertEqual(data["formats"]["FIRST_DAY_OF_WEEK"], 1)
self.assertIn("plural", data)
self.assertEqual(data["catalog"]["month name\x04May"], "Mai")
self.assertIn("DATETIME_FORMAT", data["formats"])
self.assertEqual(data["plural"], "(n != 1)")
def test_jsi18n_with_missing_en_files(self):
"""
The javascript_catalog shouldn't load the fallback language in the
case that the current selected language is actually the one translated
from, and hence missing translation files completely.
This happens easily when you're translating from English to other
languages and you've set settings.LANGUAGE_CODE to some other language
than English.
"""
with self.settings(LANGUAGE_CODE="es"), override("en-us"):
response = self.client.get("/jsi18n/")
self.assertNotContains(response, "esto tiene que ser traducido")
def test_jsoni18n_with_missing_en_files(self):
"""
Same as above for the json_catalog view. Here we also check for the
expected JSON format.
"""
with self.settings(LANGUAGE_CODE="es"), override("en-us"):
response = self.client.get("/jsoni18n/")
data = json.loads(response.content.decode())
self.assertIn("catalog", data)
self.assertIn("formats", data)
self.assertIn("plural", data)
self.assertEqual(data["catalog"], {})
self.assertIn("DATETIME_FORMAT", data["formats"])
self.assertIsNone(data["plural"])
def test_jsi18n_fallback_language(self):
"""
Let's make sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), override("fi"):
response = self.client.get("/jsi18n/")
self.assertContains(response, "il faut le traduire")
self.assertNotContains(response, "Untranslated string")
def test_jsi18n_fallback_language_with_custom_locale_dir(self):
"""
The fallback language works when there are several levels of fallback
translation catalogs.
"""
locale_paths = [
path.join(
path.dirname(path.dirname(path.abspath(__file__))),
"custom_locale_path",
),
]
with self.settings(LOCALE_PATHS=locale_paths), override("es_MX"):
response = self.client.get("/jsi18n/")
self.assertContains(
response, "custom_locale_path: esto tiene que ser traducido"
)
response = self.client.get("/jsi18n_no_packages/")
self.assertContains(
response, "custom_locale_path: esto tiene que ser traducido"
)
def test_i18n_fallback_language_plural(self):
"""
The fallback to a language with less plural forms maintains the real
language's number of plural forms and correct translations.
"""
with self.settings(LANGUAGE_CODE="pt"), override("ru"):
response = self.client.get("/jsi18n/")
self.assertEqual(
response.context["catalog"]["{count} plural3"],
["{count} plural3 p3", "{count} plural3 p3s", "{count} plural3 p3t"],
)
self.assertEqual(
response.context["catalog"]["{count} plural2"],
["{count} plural2", "{count} plural2s", ""],
)
with self.settings(LANGUAGE_CODE="ru"), override("pt"):
response = self.client.get("/jsi18n/")
self.assertEqual(
response.context["catalog"]["{count} plural3"],
["{count} plural3", "{count} plural3s"],
)
self.assertEqual(
response.context["catalog"]["{count} plural2"],
["{count} plural2", "{count} plural2s"],
)
def test_i18n_english_variant(self):
with override("en-gb"):
response = self.client.get("/jsi18n/")
self.assertIn(
'"this color is to be translated": "this colour is to be translated"',
response.context["catalog_str"],
)
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English, the selected language
is English and there is not 'en' translation available. See #13388,
#3594 and #13726 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), override("en-us"):
response = self.client.get("/jsi18n/")
self.assertNotContains(response, "Choisir une heure")
@modify_settings(INSTALLED_APPS={"append": "view_tests.app0"})
def test_non_english_default_english_userpref(self):
"""
Same as above with the difference that there IS an 'en' translation
        available. The JavaScript i18n view must return a non-empty language catalog
with the proper English translations. See #13726 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), override("en-us"):
response = self.client.get("/jsi18n_english_translation/")
self.assertContains(response, "this app0 string is to be translated")
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), override("none"):
response = self.client.get("/jsi18n/")
self.assertContains(response, "Choisir une heure")
def test_escaping(self):
# Force a language via GET otherwise the gettext functions are a noop!
response = self.client.get("/jsi18n_admin/?language=de")
self.assertContains(response, "\\x04")
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app5"]})
def test_non_BMP_char(self):
"""
Non-BMP characters should not break the javascript_catalog (#21725).
"""
with self.settings(LANGUAGE_CODE="en-us"), override("fr"):
response = self.client.get("/jsi18n/app5/")
self.assertContains(response, "emoji")
self.assertContains(response, "\\ud83d\\udca9")
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app1", "view_tests.app2"]})
def test_i18n_language_english_default(self):
"""
Check if the JavaScript i18n view returns a complete language catalog
if the default language is en-us, the selected language has a
translation available and a catalog composed by djangojs domain
translations of multiple Python packages is requested. See #13388,
#3594 and #13514 for more details.
"""
base_trans_string = (
"il faut traduire cette cha\\u00eene de caract\\u00e8res de "
)
app1_trans_string = base_trans_string + "app1"
app2_trans_string = base_trans_string + "app2"
with self.settings(LANGUAGE_CODE="en-us"), override("fr"):
response = self.client.get("/jsi18n_multi_packages1/")
self.assertContains(response, app1_trans_string)
self.assertContains(response, app2_trans_string)
response = self.client.get("/jsi18n/app1/")
self.assertContains(response, app1_trans_string)
self.assertNotContains(response, app2_trans_string)
response = self.client.get("/jsi18n/app2/")
self.assertNotContains(response, app1_trans_string)
self.assertContains(response, app2_trans_string)
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app3", "view_tests.app4"]})
def test_i18n_different_non_english_languages(self):
"""
        Similar to above but with neither the default nor the requested
        language being English.
"""
with self.settings(LANGUAGE_CODE="fr"), override("es-ar"):
response = self.client.get("/jsi18n_multi_packages2/")
self.assertContains(response, "este texto de app3 debe ser traducido")
def test_i18n_with_locale_paths(self):
extended_locale_paths = settings.LOCALE_PATHS + [
path.join(
path.dirname(path.dirname(path.abspath(__file__))),
"app3",
"locale",
),
]
with self.settings(LANGUAGE_CODE="es-ar", LOCALE_PATHS=extended_locale_paths):
with override("es-ar"):
response = self.client.get("/jsi18n/")
self.assertContains(response, "este texto de app3 debe ser traducido")
def test_i18n_unknown_package_error(self):
view = JavaScriptCatalog.as_view()
request = RequestFactory().get("/")
msg = "Invalid package(s) provided to JavaScriptCatalog: unknown_package"
with self.assertRaisesMessage(ValueError, msg):
view(request, packages="unknown_package")
msg += ",unknown_package2"
with self.assertRaisesMessage(ValueError, msg):
view(request, packages="unknown_package+unknown_package2")
@override_settings(ROOT_URLCONF="view_tests.urls")
class I18nSeleniumTests(SeleniumTestCase):
# The test cases use fixtures & translations from these apps.
available_apps = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"view_tests",
]
@override_settings(LANGUAGE_CODE="de")
def test_javascript_gettext(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + "/jsi18n_template/")
elem = self.selenium.find_element(By.ID, "gettext")
self.assertEqual(elem.text, "Entfernen")
elem = self.selenium.find_element(By.ID, "ngettext_sing")
self.assertEqual(elem.text, "1 Element")
elem = self.selenium.find_element(By.ID, "ngettext_plur")
self.assertEqual(elem.text, "455 Elemente")
elem = self.selenium.find_element(By.ID, "ngettext_onnonplural")
self.assertEqual(elem.text, "Bild")
elem = self.selenium.find_element(By.ID, "pgettext")
self.assertEqual(elem.text, "Kann")
elem = self.selenium.find_element(By.ID, "npgettext_sing")
self.assertEqual(elem.text, "1 Resultat")
elem = self.selenium.find_element(By.ID, "npgettext_plur")
self.assertEqual(elem.text, "455 Resultate")
elem = self.selenium.find_element(By.ID, "formats")
self.assertEqual(
elem.text,
"DATE_INPUT_FORMATS is an object; DECIMAL_SEPARATOR is a string; "
"FIRST_DAY_OF_WEEK is a number;",
)
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app1", "view_tests.app2"]})
@override_settings(LANGUAGE_CODE="fr")
def test_multiple_catalogs(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + "/jsi18n_multi_catalogs/")
elem = self.selenium.find_element(By.ID, "app1string")
self.assertEqual(
elem.text, "il faut traduire cette chaîne de caractères de app1"
)
elem = self.selenium.find_element(By.ID, "app2string")
self.assertEqual(
elem.text, "il faut traduire cette chaîne de caractères de app2"
)
|
455afeaf48688d3ca87275009099e761f3ddcc2c1a988732c4b0227b31300bde | import importlib
import inspect
import os
import re
import sys
import tempfile
import threading
from io import StringIO
from pathlib import Path
from unittest import mock, skipIf, skipUnless
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db import DatabaseError, connection
from django.http import Http404, HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.urls import path, reverse
from django.urls.converters import IntConverter
from django.utils.functional import SimpleLazyObject
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
from django.utils.version import PY311
from django.views.debug import (
CallableSettingWrapper,
ExceptionCycleWarning,
ExceptionReporter,
)
from django.views.debug import Path as DebugPath
from django.views.debug import (
SafeExceptionReporterFilter,
default_urlconf,
get_default_exception_reporter_filter,
technical_404_response,
technical_500_response,
)
from django.views.decorators.debug import sensitive_post_parameters, sensitive_variables
from ..views import (
custom_exception_reporter_filter_view,
index_page,
multivalue_dict_key_error,
non_sensitive_view,
paranoid_view,
sensitive_args_function_caller,
sensitive_kwargs_function_caller,
sensitive_method_view,
sensitive_view,
)
class User:
def __str__(self):
return "jacob"
class WithoutEmptyPathUrls:
urlpatterns = [path("url/", index_page, name="url")]
class CallableSettingWrapperTests(SimpleTestCase):
"""Unittests for CallableSettingWrapper"""
def test_repr(self):
class WrappedCallable:
def __repr__(self):
return "repr from the wrapped callable"
def __call__(self):
pass
actual = repr(CallableSettingWrapper(WrappedCallable()))
self.assertEqual(actual, "repr from the wrapped callable")
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
class DebugViewTests(SimpleTestCase):
def test_files(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises/")
self.assertEqual(response.status_code, 500)
data = {
"file_data.txt": SimpleUploadedFile("file_data.txt", b"haha"),
}
with self.assertLogs("django.request", "ERROR"):
response = self.client.post("/raises/", data)
self.assertContains(response, "file_data.txt", status_code=500)
self.assertNotContains(response, "haha", status_code=500)
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
with self.assertLogs("django.security", "WARNING"):
response = self.client.get("/raises400/")
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_400_bad_request(self):
# When DEBUG=True, technical_500_template() is called.
with self.assertLogs("django.request", "WARNING") as cm:
response = self.client.get("/raises400_bad_request/")
self.assertContains(response, '<div class="context" id="', status_code=400)
self.assertEqual(
cm.records[0].getMessage(),
"Malformed request syntax: /raises400_bad_request/",
)
# Ensure no 403.html template exists to test the default case.
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
}
]
)
def test_403(self):
response = self.client.get("/raises403/")
self.assertContains(response, "<h1>403 Forbidden</h1>", status_code=403)
# Set up a test 403.html template.
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"loaders": [
(
"django.template.loaders.locmem.Loader",
{
"403.html": (
"This is a test template for a 403 error "
"({{ exception }})."
),
},
),
],
},
}
]
)
def test_403_template(self):
response = self.client.get("/raises403/")
self.assertContains(response, "test template", status_code=403)
self.assertContains(response, "(Insufficient Permissions).", status_code=403)
def test_404(self):
response = self.client.get("/raises404/")
self.assertNotContains(
response,
'<pre class="exception_value">',
status_code=404,
)
self.assertContains(
response,
"<p>The current path, <code>not-in-urls</code>, didn’t match any "
"of these.</p>",
status_code=404,
html=True,
)
def test_404_not_in_urls(self):
response = self.client.get("/not-in-urls")
self.assertNotContains(response, "Raised by:", status_code=404)
self.assertNotContains(
response,
'<pre class="exception_value">',
status_code=404,
)
self.assertContains(
response, "Django tried these URL patterns", status_code=404
)
self.assertContains(
response,
"<p>The current path, <code>not-in-urls</code>, didn’t match any "
"of these.</p>",
status_code=404,
html=True,
)
# Pattern and view name of a RegexURLPattern appear.
self.assertContains(
response, r"^regex-post/(?P<pk>[0-9]+)/$", status_code=404
)
self.assertContains(response, "[name='regex-post']", status_code=404)
# Pattern and view name of a RoutePattern appear.
self.assertContains(response, r"path-post/<int:pk>/", status_code=404)
self.assertContains(response, "[name='path-post']", status_code=404)
@override_settings(ROOT_URLCONF=WithoutEmptyPathUrls)
def test_404_empty_path_not_in_urls(self):
response = self.client.get("/")
self.assertContains(
response,
"<p>The empty path didn’t match any of these.</p>",
status_code=404,
html=True,
)
def test_technical_404(self):
response = self.client.get("/technical404/")
self.assertContains(
response,
'<pre class="exception_value">Testing technical 404.</pre>',
status_code=404,
html=True,
)
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(
response,
"<td>view_tests.views.technical404</td>",
status_code=404,
)
self.assertContains(
response,
"<p>The current path, <code>technical404/</code>, matched the "
"last one.</p>",
status_code=404,
html=True,
)
def test_classbased_technical_404(self):
response = self.client.get("/classbased404/")
self.assertContains(
response,
"<th>Raised by:</th><td>view_tests.views.Http404View</td>",
status_code=404,
html=True,
)
def test_technical_500(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
self.assertContains(
response,
"<th>Raised during:</th><td>view_tests.views.raises500</td>",
status_code=500,
html=True,
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/", headers={"accept": "text/plain"})
self.assertContains(
response,
"Raised during: view_tests.views.raises500",
status_code=500,
)
def test_classbased_technical_500(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/classbased500/")
self.assertContains(
response,
"<th>Raised during:</th><td>view_tests.views.Raises500View</td>",
status_code=500,
html=True,
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get(
"/classbased500/", headers={"accept": "text/plain"}
)
self.assertContains(
response,
"Raised during: view_tests.views.Raises500View",
status_code=500,
)
def test_non_l10ned_numeric_ids(self):
"""
        Numeric IDs and the line numbers in fancy traceback context blocks
        shouldn't be localized.
"""
with self.settings(DEBUG=True):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
# We look for a HTML fragment of the form
# '<div class="context" id="c38123208">',
# not '<div class="context" id="c38,123,208"'.
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(
b'<div class="context" id="(?P<id>[^"]+)">', response.content
)
self.assertIsNotNone(match)
id_repr = match["id"]
self.assertFalse(
re.search(b"[^c0-9]", id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized "
"(value: %s)." % id_repr.decode(),
)
def test_template_exceptions(self):
with self.assertLogs("django.request", "ERROR"):
try:
self.client.get(reverse("template_exception"))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertNotEqual(
raising_loc.find('raise Exception("boom")'),
-1,
"Failed to find 'raise Exception' in last frame of "
"traceback, instead found: %s" % raising_loc,
)
@skipIf(
sys.platform == "win32",
"Raises OSError instead of TemplateDoesNotExist on Windows.",
)
def test_safestring_in_exception(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/safestring_exception/")
self.assertNotContains(
response,
"<script>alert(1);</script>",
status_code=500,
html=True,
)
self.assertContains(
response,
"<script>alert(1);</script>",
count=3,
status_code=500,
html=True,
)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile:
tempdir = os.path.dirname(tmpfile.name)
template_path = os.path.join(tempdir, template_name)
with override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [tempdir],
}
]
), self.assertLogs("django.request", "ERROR"):
response = self.client.get(
reverse(
"raises_template_does_not_exist", kwargs={"path": template_name}
)
)
self.assertContains(
response,
"%s (Source does not exist)" % template_path,
status_code=500,
count=2,
)
# Assert as HTML.
self.assertContains(
response,
"<li><code>django.template.loaders.filesystem.Loader</code>: "
"%s (Source does not exist)</li>"
% os.path.join(tempdir, "notfound.html"),
status_code=500,
html=True,
)
def test_no_template_source_loaders(self):
"""
Make sure if you don't specify a template, the debug view doesn't blow up.
"""
with self.assertLogs("django.request", "ERROR"):
with self.assertRaises(TemplateDoesNotExist):
self.client.get("/render_no_template/")
@override_settings(ROOT_URLCONF="view_tests.default_urls")
def test_default_urlconf_template(self):
"""
Make sure that the default URLconf template is shown instead of the
technical 404 page, if the user has not altered their URLconf yet.
"""
response = self.client.get("/")
self.assertContains(
response, "<h1>The install worked successfully! Congratulations!</h1>"
)
@override_settings(ROOT_URLCONF="view_tests.regression_21530_urls")
def test_regression_21530(self):
"""
Regression test for bug #21530.
If the admin app include is replaced with exactly one url
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get("/")
self.assertContains(
response, "Page not found <span>(404)</span>", status_code=404
)
def test_template_encoding(self):
"""
        The templates are loaded directly, not via a template loader, and
        should be opened with the utf-8 charset, as is the default specified
        on template engines.
"""
with mock.patch.object(DebugPath, "open") as m:
default_urlconf(None)
m.assert_called_once_with(encoding="utf-8")
m.reset_mock()
technical_404_response(mock.MagicMock(), mock.Mock())
m.assert_called_once_with(encoding="utf-8")
def test_technical_404_converter_raise_404(self):
with mock.patch.object(IntConverter, "to_python", side_effect=Http404):
response = self.client.get("/path-post/1/")
self.assertContains(response, "Page not found", status_code=404)
def test_exception_reporter_from_request(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/custom_reporter_class_view/")
self.assertContains(response, "custom traceback text", status_code=500)
@override_settings(
DEFAULT_EXCEPTION_REPORTER="view_tests.views.CustomExceptionReporter"
)
def test_exception_reporter_from_settings(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
self.assertContains(response, "custom traceback text", status_code=500)
@override_settings(
DEFAULT_EXCEPTION_REPORTER="view_tests.views.TemplateOverrideExceptionReporter"
)
def test_template_override_exception_reporter(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
self.assertContains(
response,
"<h1>Oh no, an error occurred!</h1>",
status_code=500,
html=True,
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/", headers={"accept": "text/plain"})
self.assertContains(response, "Oh dear, an error occurred!", status_code=500)
class DebugViewQueriesAllowedTests(SimpleTestCase):
# May need a query to initialize MySQL connection
databases = {"default"}
def test_handle_db_exception(self):
"""
Ensure the debug view works when a database exception is raised by
performing an invalid query and passing the exception to the debug view.
"""
with connection.cursor() as cursor:
try:
cursor.execute("INVALID SQL")
except DatabaseError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertContains(response, "OperationalError at /", status_code=500)
@override_settings(
DEBUG=True,
ROOT_URLCONF="view_tests.urls",
# No template directories are configured, so no templates will be found.
TEMPLATES=[
{
"BACKEND": "django.template.backends.dummy.TemplateStrings",
}
],
)
class NonDjangoTemplatesDebugViewTests(SimpleTestCase):
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
with self.assertLogs("django.security", "WARNING"):
response = self.client.get("/raises400/")
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_400_bad_request(self):
# When DEBUG=True, technical_500_template() is called.
with self.assertLogs("django.request", "WARNING") as cm:
response = self.client.get("/raises400_bad_request/")
self.assertContains(response, '<div class="context" id="', status_code=400)
self.assertEqual(
cm.records[0].getMessage(),
"Malformed request syntax: /raises400_bad_request/",
)
def test_403(self):
response = self.client.get("/raises403/")
self.assertContains(response, "<h1>403 Forbidden</h1>", status_code=403)
def test_404(self):
response = self.client.get("/raises404/")
self.assertEqual(response.status_code, 404)
def test_template_not_found_error(self):
# Raises a TemplateDoesNotExist exception and shows the debug view.
url = reverse(
"raises_template_does_not_exist", kwargs={"path": "notfound.html"}
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get(url)
self.assertContains(response, '<div class="context" id="', status_code=500)
class ExceptionReporterTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get("/test_view/")
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError at /test_view/</h1>", html)
self.assertIn(
'<pre class="exception_value">Can't find my keys</pre>', html
)
self.assertIn("<th>Request Method:</th>", html)
self.assertIn("<th>Request URL:</th>", html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn("<p>jacob</p>", html)
self.assertIn("<th>Exception Type:</th>", html)
self.assertIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertNotIn("<p>Request data not supplied</p>", html)
self.assertIn("<p>No POST data</p>", html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError</h1>", html)
self.assertIn(
'<pre class="exception_value">Can't find my keys</pre>', html
)
self.assertNotIn("<th>Request Method:</th>", html)
self.assertNotIn("<th>Request URL:</th>", html)
self.assertNotIn('<h3 id="user-info">USER</h3>', html)
self.assertIn("<th>Exception Type:</th>", html)
self.assertIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
def test_sharing_traceback(self):
try:
raise ValueError("Oops")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn(
'<form action="https://dpaste.com/" name="pasteform" '
'id="pasteform" method="post">',
html,
)
def test_eol_support(self):
"""The ExceptionReporter supports Unix, Windows and Macintosh EOL markers"""
LINES = ["print %d" % i for i in range(1, 6)]
reporter = ExceptionReporter(None, None, None, None)
for newline in ["\n", "\r\n", "\r"]:
fd, filename = tempfile.mkstemp(text=False)
os.write(fd, (newline.join(LINES) + newline).encode())
os.close(fd)
try:
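                # _get_lines_from_file() returns (pre_context_lineno,
                # pre_context, context_line, post_context).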
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:]),
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>Report at /test_view/</h1>", html)
self.assertIn(
'<pre class="exception_value">No exception message supplied</pre>', html
)
self.assertIn("<th>Request Method:</th>", html)
self.assertIn("<th>Request URL:</th>", html)
self.assertNotIn("<th>Exception Type:</th>", html)
self.assertNotIn("<th>Exception Value:</th>", html)
self.assertNotIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertNotIn("<p>Request data not supplied</p>", html)
def test_suppressed_context(self):
try:
try:
raise RuntimeError("Can't find my keys")
except RuntimeError:
raise ValueError("Can't find my keys") from None
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError</h1>", html)
self.assertIn(
'<pre class="exception_value">Can't find my keys</pre>', html
)
self.assertIn("<th>Exception Type:</th>", html)
self.assertIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
self.assertNotIn("During handling of the above exception", html)
def test_innermost_exception_without_traceback(self):
try:
try:
raise RuntimeError("Oops")
except Exception as exc:
new_exc = RuntimeError("My context")
exc.__context__ = new_exc
raise
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
self.assertEqual(len(frames), 2)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>RuntimeError</h1>", html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn("<th>Exception Type:</th>", html)
self.assertIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
self.assertIn(
"During handling of the above exception (My context), another "
"exception occurred",
html,
)
self.assertInHTML('<li class="frame user">None</li>', html)
self.assertIn("Traceback (most recent call last):\n None", html)
text = reporter.get_traceback_text()
self.assertIn("Exception Type: RuntimeError", text)
self.assertIn("Exception Value: Oops", text)
self.assertIn("Traceback (most recent call last):\n None", text)
self.assertIn(
"During handling of the above exception (My context), another "
"exception occurred",
text,
)
@skipUnless(PY311, "Exception notes were added in Python 3.11.")
def test_exception_with_notes(self):
request = self.rf.get("/test_view/")
try:
try:
raise RuntimeError("Oops")
except Exception as err:
err.add_note("First Note")
err.add_note("Second Note")
err.add_note(mark_safe("<script>alert(1);</script>"))
raise err
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
        self.assertIn(
            '<pre class="exception_value">Oops\nFirst Note\nSecond Note\n'
            "&lt;script&gt;alert(1);&lt;/script&gt;</pre>",
            html,
        )
        self.assertIn(
            "Exception Value: Oops\nFirst Note\nSecond Note\n"
            "&lt;script&gt;alert(1);&lt;/script&gt;",
            html,
        )
text = reporter.get_traceback_text()
self.assertIn(
"Exception Value: Oops\nFirst Note\nSecond Note\n"
"<script>alert(1);</script>",
text,
)
def test_mid_stack_exception_without_traceback(self):
try:
try:
raise RuntimeError("Inner Oops")
except Exception as exc:
new_exc = RuntimeError("My context")
new_exc.__context__ = exc
raise RuntimeError("Oops") from new_exc
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>RuntimeError</h1>", html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn("<th>Exception Type:</th>", html)
self.assertIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertInHTML('<li class="frame user">Traceback: None</li>', html)
self.assertIn(
"During handling of the above exception (Inner Oops), another "
"exception occurred:\n Traceback: None",
html,
)
text = reporter.get_traceback_text()
self.assertIn("Exception Type: RuntimeError", text)
self.assertIn("Exception Value: Oops", text)
self.assertIn("Traceback (most recent call last):", text)
self.assertIn(
"During handling of the above exception (Inner Oops), another "
"exception occurred:\n Traceback: None",
text,
)
def test_reporting_of_nested_exceptions(self):
request = self.rf.get("/test_view/")
try:
try:
raise AttributeError(mark_safe("<p>Top level</p>"))
except AttributeError as explicit:
try:
raise ValueError(mark_safe("<p>Second exception</p>")) from explicit
except ValueError:
raise IndexError(mark_safe("<p>Final exception</p>"))
except Exception:
# Custom exception handler, just pass it into ExceptionReporter
exc_type, exc_value, tb = sys.exc_info()
explicit_exc = (
"The above exception ({0}) was the direct cause of the following exception:"
)
implicit_exc = (
"During handling of the above exception ({0}), another exception occurred:"
)
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
        # Both messages appear twice on the page: once rendered as HTML and
        # once as plain text (for the pastebin).
        self.assertEqual(
            2, html.count(explicit_exc.format("&lt;p&gt;Top level&lt;/p&gt;"))
        )
        self.assertEqual(
            2, html.count(implicit_exc.format("&lt;p&gt;Second exception&lt;/p&gt;"))
        )
        self.assertEqual(10, html.count("&lt;p&gt;Final exception&lt;/p&gt;"))
text = reporter.get_traceback_text()
self.assertIn(explicit_exc.format("<p>Top level</p>"), text)
self.assertIn(implicit_exc.format("<p>Second exception</p>"), text)
self.assertEqual(3, text.count("<p>Final exception</p>"))
@skipIf(
sys._xoptions.get("no_debug_ranges", False)
or os.environ.get("PYTHONNODEBUGRANGES", False),
"Fine-grained error locations are disabled.",
)
@skipUnless(PY311, "Fine-grained error locations were added in Python 3.11.")
def test_highlight_error_position(self):
request = self.rf.get("/test_view/")
try:
try:
raise AttributeError("Top level")
except AttributeError as explicit:
try:
raise ValueError(mark_safe("<p>2nd exception</p>")) from explicit
except ValueError:
raise IndexError("Final exception")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
        self.assertIn(
            "<pre> raise AttributeError(&quot;Top level&quot;)\n"
            " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>",
            html,
        )
        self.assertIn(
            "<pre> raise ValueError(mark_safe("
            "&quot;&lt;p&gt;2nd exception&lt;/p&gt;&quot;)) from explicit\n"
            " "
            "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>",
            html,
        )
        self.assertIn(
            "<pre> raise IndexError(&quot;Final exception&quot;)\n"
            " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>",
            html,
        )
        # Pastebin.
        self.assertIn(
            " raise AttributeError(&quot;Top level&quot;)\n"
            " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
            html,
        )
        self.assertIn(
            " raise ValueError(mark_safe("
            "&quot;&lt;p&gt;2nd exception&lt;/p&gt;&quot;)) from explicit\n"
            " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
            html,
        )
        self.assertIn(
            " raise IndexError(&quot;Final exception&quot;)\n"
            " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
            html,
        )
# Text traceback.
text = reporter.get_traceback_text()
self.assertIn(
' raise AttributeError("Top level")\n'
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
text,
)
self.assertIn(
' raise ValueError(mark_safe("<p>2nd exception</p>")) from explicit\n'
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
text,
)
self.assertIn(
' raise IndexError("Final exception")\n'
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
text,
)
def test_reporting_frames_without_source(self):
try:
source = "def funcName():\n raise Error('Whoops')\nfuncName()"
namespace = {}
code = compile(source, "generated", "exec")
exec(code, namespace)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
last_frame = frames[-1]
self.assertEqual(last_frame["context_line"], "<source code not available>")
self.assertEqual(last_frame["filename"], "generated")
self.assertEqual(last_frame["function"], "funcName")
self.assertEqual(last_frame["lineno"], 2)
html = reporter.get_traceback_html()
self.assertIn(
'<span class="fname">generated</span>, line 2, in funcName',
html,
)
self.assertIn(
'<code class="fname">generated</code>, line 2, in funcName',
html,
)
self.assertIn(
'"generated", line 2, in funcName\n <source code not available>',
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n <source code not available>',
text,
)
def test_reporting_frames_source_not_match(self):
try:
source = "def funcName():\n raise Error('Whoops')\nfuncName()"
namespace = {}
code = compile(source, "generated", "exec")
exec(code, namespace)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
with mock.patch(
"django.views.debug.ExceptionReporter._get_source",
return_value=["wrong source"],
):
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
last_frame = frames[-1]
self.assertEqual(last_frame["context_line"], "<source code not available>")
self.assertEqual(last_frame["filename"], "generated")
self.assertEqual(last_frame["function"], "funcName")
self.assertEqual(last_frame["lineno"], 2)
html = reporter.get_traceback_html()
self.assertIn(
'<span class="fname">generated</span>, line 2, in funcName',
html,
)
self.assertIn(
'<code class="fname">generated</code>, line 2, in funcName',
html,
)
self.assertIn(
'"generated", line 2, in funcName\n'
" <source code not available>",
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n <source code not available>',
text,
)
def test_reporting_frames_for_cyclic_reference(self):
try:
def test_func():
try:
raise RuntimeError("outer") from RuntimeError("inner")
except RuntimeError as exc:
raise exc.__cause__
test_func()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
def generate_traceback_frames(*args, **kwargs):
nonlocal tb_frames
tb_frames = reporter.get_traceback_frames()
tb_frames = None
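        # Run the traversal in a daemon thread so the test can time out via
        # join() below instead of hanging if cycle detection ever regresses.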
tb_generator = threading.Thread(target=generate_traceback_frames, daemon=True)
msg = (
"Cycle in the exception chain detected: exception 'inner' "
"encountered again."
)
with self.assertWarnsMessage(ExceptionCycleWarning, msg):
tb_generator.start()
tb_generator.join(timeout=5)
if tb_generator.is_alive():
# tb_generator is a daemon that runs until the main thread/process
# exits. This is resource heavy when running the full test suite.
# Setting the following values to None makes
# reporter.get_traceback_frames() exit early.
exc_value.__traceback__ = exc_value.__context__ = exc_value.__cause__ = None
tb_generator.join()
self.fail("Cyclic reference in Exception Reporter.get_traceback_frames()")
if tb_frames is None:
            # This can happen if the thread generating the traceback was
            # killed, or if an exception occurred while generating it.
self.fail("Traceback generation failed")
last_frame = tb_frames[-1]
self.assertIn("raise exc.__cause__", last_frame["context_line"])
self.assertEqual(last_frame["filename"], __file__)
self.assertEqual(last_frame["function"], "test_func")
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>Report at /test_view/</h1>", html)
self.assertIn(
'<pre class="exception_value">I'm a little teapot</pre>', html
)
self.assertIn("<th>Request Method:</th>", html)
self.assertIn("<th>Request URL:</th>", html)
self.assertNotIn("<th>Exception Type:</th>", html)
self.assertNotIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertNotIn("<p>Request data not supplied</p>", html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>Report</h1>", html)
self.assertIn(
'<pre class="exception_value">I'm a little teapot</pre>', html
)
self.assertNotIn("<th>Request Method:</th>", html)
self.assertNotIn("<th>Request URL:</th>", html)
self.assertNotIn("<th>Exception Type:</th>", html)
self.assertNotIn("<th>Exception Value:</th>", html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
def test_non_utf8_values_handling(self):
"Non-UTF-8 exceptions/values should not make the output generation choke."
try:
class NonUtf8Output(Exception):
def __repr__(self):
return b"EXC\xe9EXC"
somevar = b"VAL\xe9VAL" # NOQA
raise NonUtf8Output()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn("VAL\\xe9VAL", html)
self.assertIn("EXC\\xe9EXC", html)
def test_local_variable_escaping(self):
"""Safe strings in local variables are escaped."""
try:
local = mark_safe("<p>Local variable</p>")
raise ValueError(local)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html()
self.assertIn(
'<td class="code"><pre>'<p>Local variable</p>'</pre>'
"</td>",
html,
)
def test_unprintable_values_handling(self):
"Unprintable values should not make the output generation choke."
try:
class OomOutput:
def __repr__(self):
raise MemoryError("OOM")
oomvalue = OomOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<td class="code"><pre>Error in formatting', html)
def test_too_large_values_handling(self):
"Large values should not create a large HTML."
large = 256 * 1024
repr_of_str_adds = len(repr(""))
try:
class LargeOutput:
def __repr__(self):
return repr("A" * large)
largevalue = LargeOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
        self.assertEqual(len(html) // 1024 // 128, 0)  # still fits in 128Kb
        self.assertIn(
            "&lt;trimmed %d bytes string&gt;" % (large + repr_of_str_adds,), html
        )
def test_encoding_error(self):
"""
A UnicodeError displays a portion of the problematic string. HTML in
safe strings is escaped.
"""
try:
mark_safe("abcdefghijkl<p>mnὀp</p>qrstuwxyz").encode("ascii")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn("<h2>Unicode error hint</h2>", html)
self.assertIn("The string that could not be encoded/decoded was: ", html)
self.assertIn("<strong><p>mnὀp</p></strong>", html)
def test_unfrozen_importlib(self):
"""
        importlib is not a frozen app, but its loader thinks it's frozen,
        which results in an ImportError. Refs #21443.
"""
try:
request = self.rf.get("/test_view/")
importlib.import_module("abc.def.invalid.name")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ModuleNotFoundError at /test_view/</h1>", html)
def test_ignore_traceback_evaluation_exceptions(self):
"""
Don't trip over exceptions generated by crafted objects when
evaluating them while cleansing (#24455).
"""
class BrokenEvaluation(Exception):
pass
def broken_setup():
raise BrokenEvaluation
request = self.rf.get("/test_view/")
broken_lazy = SimpleLazyObject(broken_setup)
try:
bool(broken_lazy)
except BrokenEvaluation:
exc_type, exc_value, tb = sys.exc_info()
self.assertIn(
"BrokenEvaluation",
ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(),
"Evaluation exception reason not mentioned in traceback",
)
@override_settings(ALLOWED_HOSTS="example.com")
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get("/", headers={"host": "evil.com"})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn("http://evil.com/", html)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
        value = '<td>items</td><td class="code"><pre>&#x27;Oops&#x27;</pre></td>'
# GET
request = self.rf.get("/test_view/?items=Oops")
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# POST
request = self.rf.post("/test_view/", data={"items": "Oops"})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# FILES
fp = StringIO("filecontent")
request = self.rf.post("/test_view/", data={"name": "filename", "items": fp})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre><InMemoryUploadedFile: '
"items (application/octet-stream)></pre></td>",
html,
)
# COOKIES
rf = RequestFactory()
rf.cookies["items"] = "Oops"
request = rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre>'Oops'</pre></td>', html
)
def test_exception_fetching_user(self):
"""
The error page can be rendered if the current user can't be retrieved
(such as when the database is unavailable).
"""
class ExceptionUser:
def __str__(self):
raise Exception()
request = self.rf.get("/test_view/")
request.user = ExceptionUser()
try:
raise ValueError("Oops")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError at /test_view/</h1>", html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn("<p>[unable to retrieve the current user]</p>", html)
text = reporter.get_traceback_text()
self.assertIn("USER: [unable to retrieve the current user]", text)
def test_template_encoding(self):
"""
The templates are loaded directly, not via a template loader, and
should be opened as utf-8 charset as is the default specified on
template engines.
"""
reporter = ExceptionReporter(None, None, None, None)
with mock.patch.object(DebugPath, "open") as m:
reporter.get_traceback_html()
m.assert_called_once_with(encoding="utf-8")
m.reset_mock()
reporter.get_traceback_text()
m.assert_called_once_with(encoding="utf-8")
@override_settings(ALLOWED_HOSTS=["example.com"])
def test_get_raw_insecure_uri(self):
factory = RequestFactory(headers={"host": "evil.com"})
tests = [
("////absolute-uri", "http://evil.com//absolute-uri"),
("/?foo=bar", "http://evil.com/?foo=bar"),
("/path/with:colons", "http://evil.com/path/with:colons"),
]
for url, expected in tests:
with self.subTest(url=url):
request = factory.get(url)
reporter = ExceptionReporter(request, None, None, None)
self.assertEqual(reporter._get_raw_insecure_uri(), expected)
class PlainTextReportTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get("/test_view/")
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn("ValueError at /test_view/", text)
self.assertIn("Can't find my keys", text)
self.assertIn("Request Method:", text)
self.assertIn("Request URL:", text)
self.assertIn("USER: jacob", text)
self.assertIn("Exception Type:", text)
self.assertIn("Exception Value:", text)
self.assertIn("Traceback (most recent call last):", text)
self.assertIn("Request information:", text)
self.assertNotIn("Request data not supplied", text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn("ValueError", text)
self.assertIn("Can't find my keys", text)
self.assertNotIn("Request Method:", text)
self.assertNotIn("Request URL:", text)
self.assertNotIn("USER:", text)
self.assertIn("Exception Type:", text)
self.assertIn("Exception Value:", text)
self.assertIn("Traceback (most recent call last):", text)
self.assertIn("Request data not supplied", text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(DEBUG=True)
def test_template_exception(self):
request = self.rf.get("/test_view/")
try:
render(request, "debug/template_error.html")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
templ_path = Path(
Path(__file__).parents[1], "templates", "debug", "template_error.html"
)
self.assertIn(
"Template error:\n"
"In template %(path)s, error at line 2\n"
" 'cycle' tag requires at least two arguments\n"
" 1 : Template with error:\n"
" 2 : {%% cycle %%} \n"
" 3 : " % {"path": templ_path},
text,
)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
# GET
request = self.rf.get("/test_view/?items=Oops")
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# POST
request = self.rf.post("/test_view/", data={"items": "Oops"})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# FILES
fp = StringIO("filecontent")
request = self.rf.post("/test_view/", data={"name": "filename", "items": fp})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = <InMemoryUploadedFile:", text)
# COOKIES
rf = RequestFactory()
rf.cookies["items"] = "Oops"
request = rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(ALLOWED_HOSTS="example.com")
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get("/", headers={"host": "evil.com"})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("http://evil.com/", text)
class ExceptionReportTestMixin:
    # Mixin used in the ExceptionReporterFilterTests and
    # NonHTMLResponseExceptionReporterFilter tests below.
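    # The views under test mark the "sausage" and "bacon" parameters as
    # sensitive, so the "safe" and "paranoid" helpers below expect those
    # values (but not the other two) to be filtered out.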
breakfast_data = {
"sausage-key": "sausage-value",
"baked-beans-key": "baked-beans-value",
"hash-brown-key": "hash-brown-value",
"bacon-key": "bacon-value",
}
def verify_unsafe_response(
self, view, check_for_vars=True, check_for_POST_params=True
):
"""
        Asserts that potentially sensitive info is displayed in the response.
"""
request = self.rf.post("/some_url/", self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, "cooked_eggs", status_code=500)
self.assertContains(response, "scrambled", status_code=500)
self.assertContains(response, "sauce", status_code=500)
self.assertContains(response, "worcestershire", status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(
self, view, check_for_vars=True, check_for_POST_params=True
):
"""
        Asserts that certain sensitive info is not displayed in the response.
"""
request = self.rf.post("/some_url/", self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, "cooked_eggs", status_code=500)
self.assertContains(response, "scrambled", status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, "sauce", status_code=500)
self.assertNotContains(response, "worcestershire", status_code=500)
if check_for_POST_params:
for k in self.breakfast_data:
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, "baked-beans-value", status_code=500)
self.assertContains(response, "hash-brown-value", status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, "sausage-value", status_code=500)
self.assertNotContains(response, "bacon-value", status_code=500)
def verify_paranoid_response(
self, view, check_for_vars=True, check_for_POST_params=True
):
"""
Asserts that no variables or POST parameters are displayed in the response.
"""
request = self.rf.post("/some_url/", self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, "cooked_eggs", status_code=500)
self.assertNotContains(response, "scrambled", status_code=500)
self.assertContains(response, "sauce", status_code=500)
self.assertNotContains(response, "worcestershire", status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the email report.
"""
with self.settings(ADMINS=[("Admin", "[email protected]")]):
mail.outbox = [] # Empty outbox
request = self.rf.post("/some_url/", self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn("cooked_eggs", body_plain)
self.assertNotIn("scrambled", body_plain)
self.assertNotIn("sauce", body_plain)
self.assertNotIn("worcestershire", body_plain)
# Frames vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn("cooked_eggs", body_html)
self.assertIn("scrambled", body_html)
self.assertIn("sauce", body_html)
self.assertIn("worcestershire", body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the email report.
"""
with self.settings(ADMINS=[("Admin", "[email protected]")]):
mail.outbox = [] # Empty outbox
request = self.rf.post("/some_url/", self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn("cooked_eggs", body_plain)
self.assertNotIn("scrambled", body_plain)
self.assertNotIn("sauce", body_plain)
self.assertNotIn("worcestershire", body_plain)
# Frames vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn("cooked_eggs", body_html)
self.assertIn("scrambled", body_html)
self.assertIn("sauce", body_html)
self.assertNotIn("worcestershire", body_html)
if check_for_POST_params:
for k in self.breakfast_data:
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn("baked-beans-value", body_plain)
self.assertIn("hash-brown-value", body_plain)
self.assertIn("baked-beans-value", body_html)
self.assertIn("hash-brown-value", body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn("sausage-value", body_plain)
self.assertNotIn("bacon-value", body_plain)
self.assertNotIn("sausage-value", body_html)
self.assertNotIn("bacon-value", body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email report.
"""
with self.settings(ADMINS=[("Admin", "[email protected]")]):
mail.outbox = [] # Empty outbox
request = self.rf.post("/some_url/", self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body = str(email.body)
self.assertNotIn("cooked_eggs", body)
self.assertNotIn("scrambled", body)
self.assertNotIn("sauce", body)
self.assertNotIn("worcestershire", body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
@override_settings(ROOT_URLCONF="view_tests.urls")
class ExceptionReporterFilterTests(
ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase
):
"""
Sensitive information can be filtered out of error reports (#14614).
"""
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
        Everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
No POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Sensitive POST parameters cannot be seen in the
        error reports if request.POST['nonexistent_key'] raises an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
The sensitive_variables decorator works with object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
sensitive_method_view, check_for_POST_params=False
)
self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(
sensitive_method_view, check_for_POST_params=False
)
self.verify_safe_email(sensitive_method_view, check_for_POST_params=False)
def test_sensitive_function_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as arguments to the decorated
function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(
sensitive_args_function_caller, check_for_POST_params=False
)
self.verify_safe_email(
sensitive_args_function_caller, check_for_POST_params=False
)
def test_sensitive_function_keyword_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as keyword arguments to the
decorated function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(
sensitive_kwargs_function_caller, check_for_POST_params=False
)
self.verify_safe_email(
sensitive_kwargs_function_caller, check_for_POST_params=False
)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "This should not be displayed", status_code=500
)
def test_callable_settings_forbidding_to_set_attributes(self):
"""
        Callable settings that forbid setting attributes should not break
        the debug page (#23070).
"""
class CallableSettingWithSlots:
__slots__ = []
def __call__(self):
return "This should not be displayed"
with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "This should not be displayed", status_code=500
)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get("/raises500/")
self.assertContains(response, "FOOBAR", status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
sensitive_settings = [
"SECRET_KEY",
"SECRET_KEY_FALLBACKS",
"PASSWORD",
"API_KEY",
"AUTH_TOKEN",
]
for setting in sensitive_settings:
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "should not be displayed", status_code=500
)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
sensitive_settings = [
"SECRET_KEY",
"SECRET_KEY_FALLBACKS",
"PASSWORD",
"API_KEY",
"AUTH_TOKEN",
]
for setting in sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
"recursive": {setting: "should not be displayed"},
}
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "should not be displayed", status_code=500
)
def test_cleanse_setting_basic(self):
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(reporter_filter.cleanse_setting("TEST", "TEST"), "TEST")
self.assertEqual(
reporter_filter.cleanse_setting("PASSWORD", "super_secret"),
reporter_filter.cleansed_substitute,
)
def test_cleanse_setting_ignore_case(self):
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(
reporter_filter.cleanse_setting("password", "super_secret"),
reporter_filter.cleansed_substitute,
)
def test_cleanse_setting_recurses_in_dictionary(self):
reporter_filter = SafeExceptionReporterFilter()
initial = {"login": "cooper", "password": "secret"}
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", initial),
{"login": "cooper", "password": reporter_filter.cleansed_substitute},
)
def test_cleanse_setting_recurses_in_dictionary_with_non_string_key(self):
reporter_filter = SafeExceptionReporterFilter()
initial = {("localhost", 8000): {"login": "cooper", "password": "secret"}}
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", initial),
{
("localhost", 8000): {
"login": "cooper",
"password": reporter_filter.cleansed_substitute,
},
},
)
def test_cleanse_setting_recurses_in_list_tuples(self):
reporter_filter = SafeExceptionReporterFilter()
initial = [
{
"login": "cooper",
"password": "secret",
"apps": (
{"name": "app1", "api_key": "a06b-c462cffae87a"},
{"name": "app2", "api_key": "a9f4-f152e97ad808"},
),
"tokens": ["98b37c57-ec62-4e39", "8690ef7d-8004-4916"],
},
{"SECRET_KEY": "c4d77c62-6196-4f17-a06b-c462cffae87a"},
]
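        # The "tokens" key matches the hidden-settings pattern (TOKEN), so the
        # whole list is substituted; "apps" doesn't match, so cleansing
        # recurses into it and only the matching "api_key" values are filtered.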
cleansed = [
{
"login": "cooper",
"password": reporter_filter.cleansed_substitute,
"apps": (
{"name": "app1", "api_key": reporter_filter.cleansed_substitute},
{"name": "app2", "api_key": reporter_filter.cleansed_substitute},
),
"tokens": reporter_filter.cleansed_substitute,
},
{"SECRET_KEY": reporter_filter.cleansed_substitute},
]
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", initial),
cleansed,
)
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", tuple(initial)),
tuple(cleansed),
)
def test_request_meta_filtering(self):
request = self.rf.get("/", headers={"secret-header": "super_secret"})
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(
reporter_filter.get_safe_request_meta(request)["HTTP_SECRET_HEADER"],
reporter_filter.cleansed_substitute,
)
def test_exception_report_uses_meta_filtering(self):
response = self.client.get(
"/raises500/", headers={"secret-header": "super_secret"}
)
self.assertNotIn(b"super_secret", response.content)
response = self.client.get(
"/raises500/",
headers={"secret-header": "super_secret", "accept": "application/json"},
)
self.assertNotIn(b"super_secret", response.content)
@override_settings(SESSION_COOKIE_NAME="djangosession")
def test_cleanse_session_cookie_value(self):
self.client.cookies.load({"djangosession": "should not be displayed"})
response = self.client.get("/raises500/")
self.assertNotContains(response, "should not be displayed", status_code=500)
class CustomExceptionReporterFilter(SafeExceptionReporterFilter):
cleansed_substitute = "XXXXXXXXXXXXXXXXXXXX"
hidden_settings = _lazy_re_compile(
"API|TOKEN|KEY|SECRET|PASS|SIGNATURE|DATABASE_URL", flags=re.I
)
@override_settings(
ROOT_URLCONF="view_tests.urls",
DEFAULT_EXCEPTION_REPORTER_FILTER="%s.CustomExceptionReporterFilter" % __name__,
)
class CustomExceptionReporterFilterTests(SimpleTestCase):
def setUp(self):
get_default_exception_reporter_filter.cache_clear()
def tearDown(self):
get_default_exception_reporter_filter.cache_clear()
def test_setting_allows_custom_subclass(self):
self.assertIsInstance(
get_default_exception_reporter_filter(),
CustomExceptionReporterFilter,
)
def test_cleansed_substitute_override(self):
reporter_filter = get_default_exception_reporter_filter()
self.assertEqual(
reporter_filter.cleanse_setting("password", "super_secret"),
reporter_filter.cleansed_substitute,
)
def test_hidden_settings_override(self):
reporter_filter = get_default_exception_reporter_filter()
self.assertEqual(
reporter_filter.cleanse_setting("database_url", "super_secret"),
reporter_filter.cleansed_substitute,
)
class NonHTMLResponseExceptionReporterFilter(
ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase
):
"""
Sensitive information can be filtered out of error reports.
    The plain text 500 debug-only error page is served when the request is
    detected as not accepting HTML content. Don't check for the
(non)existence of frames vars in the traceback information section of the
response content because they're not included in these error pages.
Refs #14614.
"""
rf = RequestFactory(headers={"accept": "application/json"})
def test_non_sensitive_request(self):
"""
        Request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
No POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
custom_exception_reporter_filter_view, check_for_vars=False
)
with self.settings(DEBUG=False):
self.verify_unsafe_response(
custom_exception_reporter_filter_view, check_for_vars=False
)
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
def test_non_html_response_encoding(self):
response = self.client.get(
"/raises500/", headers={"accept": "application/json"}
)
self.assertEqual(response.headers["Content-Type"], "text/plain; charset=utf-8")
class DecoratorsTests(SimpleTestCase):
def test_sensitive_variables_not_called(self):
msg = (
"sensitive_variables() must be called to use it as a decorator, "
"e.g., use @sensitive_variables(), not @sensitive_variables."
)
with self.assertRaisesMessage(TypeError, msg):
@sensitive_variables
def test_func(password):
pass
def test_sensitive_post_parameters_not_called(self):
msg = (
"sensitive_post_parameters() must be called to use it as a "
"decorator, e.g., use @sensitive_post_parameters(), not "
"@sensitive_post_parameters."
)
with self.assertRaisesMessage(TypeError, msg):
@sensitive_post_parameters
def test_func(request):
return index_page(request)
def test_sensitive_post_parameters_http_request(self):
class MyClass:
@sensitive_post_parameters()
def a_view(self, request):
return HttpResponse()
msg = (
"sensitive_post_parameters didn't receive an HttpRequest object. "
"If you are decorating a classmethod, make sure to use "
"@method_decorator."
)
with self.assertRaisesMessage(TypeError, msg):
MyClass().a_view(HttpRequest())
|
e0497fda701242aea2cff1ef82e40a5e9d93272dd8b9afd506d9c20d2da3a57c | from datetime import datetime, timedelta
from django.template.defaultfilters import timesince_filter
from django.test import SimpleTestCase
from django.test.utils import requires_tz_support
from ..utils import setup
from .timezone_utils import TimezoneTestCase
class TimesinceTests(TimezoneTestCase):
"""
#20246 - \xa0 in output avoids line-breaks between value and unit
"""
    # By default, compare with datetime.now().
@setup({"timesince01": "{{ a|timesince }}"})
def test_timesince01(self):
output = self.engine.render_to_string(
"timesince01", {"a": datetime.now() + timedelta(minutes=-1, seconds=-10)}
)
self.assertEqual(output, "1\xa0minute")
@setup({"timesince02": "{{ a|timesince }}"})
def test_timesince02(self):
output = self.engine.render_to_string(
"timesince02", {"a": datetime.now() - timedelta(days=1, minutes=1)}
)
self.assertEqual(output, "1\xa0day")
@setup({"timesince03": "{{ a|timesince }}"})
def test_timesince03(self):
output = self.engine.render_to_string(
"timesince03",
{"a": datetime.now() - timedelta(hours=1, minutes=25, seconds=10)},
)
self.assertEqual(output, "1\xa0hour, 25\xa0minutes")
# Compare to a given parameter
@setup({"timesince04": "{{ a|timesince:b }}"})
def test_timesince04(self):
output = self.engine.render_to_string(
"timesince04",
{"a": self.now - timedelta(days=2), "b": self.now - timedelta(days=1)},
)
self.assertEqual(output, "1\xa0day")
@setup({"timesince05": "{{ a|timesince:b }}"})
def test_timesince05(self):
output = self.engine.render_to_string(
"timesince05",
{
"a": self.now - timedelta(days=2, minutes=1),
"b": self.now - timedelta(days=2),
},
)
self.assertEqual(output, "1\xa0minute")
# Timezone is respected
@setup({"timesince06": "{{ a|timesince:b }}"})
def test_timesince06(self):
output = self.engine.render_to_string(
"timesince06", {"a": self.now_tz - timedelta(hours=8), "b": self.now_tz}
)
self.assertEqual(output, "8\xa0hours")
# Tests for #7443
@setup({"timesince07": "{{ earlier|timesince }}"})
def test_timesince07(self):
output = self.engine.render_to_string(
"timesince07", {"earlier": self.now - timedelta(days=7)}
)
self.assertEqual(output, "1\xa0week")
@setup({"timesince08": "{{ earlier|timesince:now }}"})
def test_timesince08(self):
output = self.engine.render_to_string(
"timesince08", {"now": self.now, "earlier": self.now - timedelta(days=7)}
)
self.assertEqual(output, "1\xa0week")
@setup({"timesince09": "{{ later|timesince }}"})
def test_timesince09(self):
output = self.engine.render_to_string(
"timesince09", {"later": self.now + timedelta(days=7)}
)
self.assertEqual(output, "0\xa0minutes")
@setup({"timesince10": "{{ later|timesince:now }}"})
def test_timesince10(self):
output = self.engine.render_to_string(
"timesince10", {"now": self.now, "later": self.now + timedelta(days=7)}
)
self.assertEqual(output, "0\xa0minutes")
# Differing timezones are calculated correctly.
@setup({"timesince11": "{{ a|timesince }}"})
def test_timesince11(self):
output = self.engine.render_to_string("timesince11", {"a": self.now})
self.assertEqual(output, "0\xa0minutes")
@requires_tz_support
@setup({"timesince12": "{{ a|timesince }}"})
def test_timesince12(self):
output = self.engine.render_to_string("timesince12", {"a": self.now_tz})
self.assertEqual(output, "0\xa0minutes")
@requires_tz_support
@setup({"timesince13": "{{ a|timesince }}"})
def test_timesince13(self):
output = self.engine.render_to_string("timesince13", {"a": self.now_tz_i})
self.assertEqual(output, "0\xa0minutes")
@setup({"timesince14": "{{ a|timesince:b }}"})
def test_timesince14(self):
output = self.engine.render_to_string(
"timesince14", {"a": self.now_tz, "b": self.now_tz_i}
)
self.assertEqual(output, "0\xa0minutes")
@setup({"timesince15": "{{ a|timesince:b }}"})
def test_timesince15(self):
output = self.engine.render_to_string(
"timesince15", {"a": self.now, "b": self.now_tz_i}
)
self.assertEqual(output, "")
@setup({"timesince16": "{{ a|timesince:b }}"})
def test_timesince16(self):
output = self.engine.render_to_string(
"timesince16", {"a": self.now_tz_i, "b": self.now}
)
self.assertEqual(output, "")
# Tests for #9065 (two date objects).
@setup({"timesince17": "{{ a|timesince:b }}"})
def test_timesince17(self):
output = self.engine.render_to_string(
"timesince17", {"a": self.today, "b": self.today}
)
self.assertEqual(output, "0\xa0minutes")
@setup({"timesince18": "{{ a|timesince:b }}"})
def test_timesince18(self):
output = self.engine.render_to_string(
"timesince18", {"a": self.today, "b": self.today + timedelta(hours=24)}
)
self.assertEqual(output, "1\xa0day")
# Tests for #33879 (wrong results for 11 months + several weeks).
@setup({"timesince19": "{{ earlier|timesince }}"})
def test_timesince19(self):
output = self.engine.render_to_string(
"timesince19", {"earlier": self.today - timedelta(days=358)}
)
self.assertEqual(output, "11\xa0months, 3\xa0weeks")
@setup({"timesince20": "{{ a|timesince:b }}"})
def test_timesince20(self):
now = datetime(2018, 5, 9)
output = self.engine.render_to_string(
"timesince20",
{"a": now, "b": now + timedelta(days=365) + timedelta(days=364)},
)
self.assertEqual(output, "1\xa0year, 11\xa0months")
class FunctionTests(SimpleTestCase):
def test_since_now(self):
self.assertEqual(timesince_filter(datetime.now() - timedelta(1)), "1\xa0day")
def test_no_args(self):
self.assertEqual(timesince_filter(None), "")
def test_explicit_date(self):
self.assertEqual(
timesince_filter(datetime(2005, 12, 29), datetime(2005, 12, 30)), "1\xa0day"
)
|
e910e21a554b6b35388368c2e833d80ebb0c3b130b199a7e46f773b24b6795e3 | from datetime import date, datetime
from django.conf.urls.i18n import i18n_patterns
from django.contrib.sitemaps import GenericSitemap, Sitemap, views
from django.http import HttpResponse
from django.urls import path
from django.utils import timezone
from django.views.decorators.cache import cache_page
from ..models import I18nTestModel, TestModel
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
lastmod = date.today()
def items(self):
return [object()]
class SimplePagedSitemap(Sitemap):
lastmod = date.today()
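    # Sitemap.limit defaults to 50,000 URLs per page; one extra item forces
    # the index view to paginate this sitemap.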
def items(self):
return [object() for x in range(Sitemap.limit + 1)]
class SimpleI18nSitemap(Sitemap):
changefreq = "never"
priority = 0.5
i18n = True
def items(self):
return I18nTestModel.objects.order_by("pk").all()
class AlternatesI18nSitemap(SimpleI18nSitemap):
alternates = True
class LimitedI18nSitemap(AlternatesI18nSitemap):
languages = ["en", "es"]
class XDefaultI18nSitemap(AlternatesI18nSitemap):
x_default = True
class ItemByLangSitemap(SimpleI18nSitemap):
def get_languages_for_item(self, item):
if item.name == "Only for PT":
return ["pt"]
return super().get_languages_for_item(item)
class ItemByLangAlternatesSitemap(AlternatesI18nSitemap):
x_default = True
def get_languages_for_item(self, item):
if item.name == "Only for PT":
return ["pt"]
return super().get_languages_for_item(item)
class EmptySitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
class FixedLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0)
class FixedLastmodMixedSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
loop = 0
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
class FixedNewerLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 4, 20, 5, 0, 0)
class DateSiteMap(SimpleSitemap):
lastmod = date(2013, 3, 13)
class TimezoneSiteMap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300))
class CallableLastmodPartialSitemap(Sitemap):
"""Not all items have `lastmod`."""
location = "/location/"
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
def lastmod(self, obj):
return obj.lastmod
class CallableLastmodFullSitemap(Sitemap):
"""All items have `lastmod`."""
location = "/location/"
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
o2.lastmod = datetime(2014, 3, 13, 10, 0, 0)
return [o1, o2]
def lastmod(self, obj):
return obj.lastmod
class CallableLastmodNoItemsSitemap(Sitemap):
location = "/location/"
def items(self):
return []
def lastmod(self, obj):
return obj.lastmod
class GetLatestLastmodNoneSiteMap(Sitemap):
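    """Items have lastmod values, but no site-wide latest is exposed."""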
changefreq = "never"
priority = 0.5
location = "/location/"
def items(self):
return [object()]
def lastmod(self, obj):
return datetime(2013, 3, 13, 10, 0, 0)
def get_latest_lastmod(self):
return None
class GetLatestLastmodSiteMap(SimpleSitemap):
def get_latest_lastmod(self):
return datetime(2013, 3, 13, 10, 0, 0)
def testmodelview(request, id):
return HttpResponse()
simple_sitemaps = {
"simple": SimpleSitemap,
}
simple_i18n_sitemaps = {
"i18n": SimpleI18nSitemap,
}
alternates_i18n_sitemaps = {
"i18n-alternates": AlternatesI18nSitemap,
}
limited_i18n_sitemaps = {
"i18n-limited": LimitedI18nSitemap,
}
xdefault_i18n_sitemaps = {
"i18n-xdefault": XDefaultI18nSitemap,
}
item_by_lang_i18n_sitemaps = {
"i18n-item-by-lang": ItemByLangSitemap,
}
item_by_lang_alternates_i18n_sitemaps = {
"i18n-item-by-lang-alternates": ItemByLangAlternatesSitemap,
}
simple_sitemaps_not_callable = {
"simple": SimpleSitemap(),
}
simple_sitemaps_paged = {
"simple": SimplePagedSitemap,
}
empty_sitemaps = {
"empty": EmptySitemap,
}
fixed_lastmod_sitemaps = {
"fixed-lastmod": FixedLastmodSitemap,
}
fixed_lastmod_mixed_sitemaps = {
"fixed-lastmod-mixed": FixedLastmodMixedSitemap,
}
sitemaps_lastmod_mixed_ascending = {
"no-lastmod": EmptySitemap,
"lastmod": FixedLastmodSitemap,
}
sitemaps_lastmod_mixed_descending = {
"lastmod": FixedLastmodSitemap,
"no-lastmod": EmptySitemap,
}
sitemaps_lastmod_ascending = {
"date": DateSiteMap,
"datetime": FixedLastmodSitemap,
"datetime-newer": FixedNewerLastmodSitemap,
}
sitemaps_lastmod_descending = {
"datetime-newer": FixedNewerLastmodSitemap,
"datetime": FixedLastmodSitemap,
"date": DateSiteMap,
}
generic_sitemaps = {
"generic": GenericSitemap({"queryset": TestModel.objects.order_by("pk").all()}),
}
get_latest_lastmod_none_sitemaps = {
"get-latest-lastmod-none": GetLatestLastmodNoneSiteMap,
}
get_latest_lastmod_sitemaps = {
"get-latest-lastmod": GetLatestLastmodSiteMap,
}
latest_lastmod_timezone_sitemaps = {
"latest-lastmod-timezone": TimezoneSiteMap,
}
generic_sitemaps_lastmod = {
"generic": GenericSitemap(
{
"queryset": TestModel.objects.order_by("pk").all(),
"date_field": "lastmod",
}
),
}
callable_lastmod_partial_sitemap = {
"callable-lastmod": CallableLastmodPartialSitemap,
}
callable_lastmod_full_sitemap = {
"callable-lastmod": CallableLastmodFullSitemap,
}
callable_lastmod_no_items_sitemap = {
"callable-lastmod": CallableLastmodNoItemsSitemap,
}
urlpatterns = [
path("simple/index.xml", views.index, {"sitemaps": simple_sitemaps}),
path("simple-paged/index.xml", views.index, {"sitemaps": simple_sitemaps_paged}),
path(
"simple-not-callable/index.xml",
views.index,
{"sitemaps": simple_sitemaps_not_callable},
),
path(
"simple/custom-index.xml",
views.index,
{"sitemaps": simple_sitemaps, "template_name": "custom_sitemap_index.xml"},
),
path(
"simple/custom-lastmod-index.xml",
views.index,
{
"sitemaps": simple_sitemaps,
"template_name": "custom_sitemap_lastmod_index.xml",
},
),
path(
"simple/sitemap-<section>.xml",
views.sitemap,
{"sitemaps": simple_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/sitemap.xml",
views.sitemap,
{"sitemaps": simple_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/i18n.xml",
views.sitemap,
{"sitemaps": simple_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"alternates/i18n.xml",
views.sitemap,
{"sitemaps": alternates_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"limited/i18n.xml",
views.sitemap,
{"sitemaps": limited_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"x-default/i18n.xml",
views.sitemap,
{"sitemaps": xdefault_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/custom-sitemap.xml",
views.sitemap,
{"sitemaps": simple_sitemaps, "template_name": "custom_sitemap.xml"},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"empty/sitemap.xml",
views.sitemap,
{"sitemaps": empty_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/sitemap.xml",
views.sitemap,
{"sitemaps": fixed_lastmod_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-mixed/sitemap.xml",
views.sitemap,
{"sitemaps": fixed_lastmod_mixed_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/date-sitemap.xml",
views.sitemap,
{"sitemaps": {"date-sitemap": DateSiteMap}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/tz-sitemap.xml",
views.sitemap,
{"sitemaps": {"tz-sitemap": TimezoneSiteMap}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/mixed-ascending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_mixed_ascending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/mixed-descending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_mixed_descending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/ascending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_ascending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"item-by-lang/i18n.xml",
views.sitemap,
{"sitemaps": item_by_lang_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"item-by-lang-alternates/i18n.xml",
views.sitemap,
{"sitemaps": item_by_lang_alternates_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/descending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_descending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/get-latest-lastmod-none-sitemap.xml",
views.index,
{"sitemaps": get_latest_lastmod_none_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"lastmod/get-latest-lastmod-sitemap.xml",
views.index,
{"sitemaps": get_latest_lastmod_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"lastmod/latest-lastmod-timezone-sitemap.xml",
views.index,
{"sitemaps": latest_lastmod_timezone_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"generic/sitemap.xml",
views.sitemap,
{"sitemaps": generic_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"generic-lastmod/sitemap.xml",
views.sitemap,
{"sitemaps": generic_sitemaps_lastmod},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"cached/index.xml",
cache_page(1)(views.index),
{"sitemaps": simple_sitemaps, "sitemap_url_name": "cached_sitemap"},
),
path(
"cached/sitemap-<section>.xml",
cache_page(1)(views.sitemap),
{"sitemaps": simple_sitemaps},
name="cached_sitemap",
),
path(
"sitemap-without-entries/sitemap.xml",
views.sitemap,
{"sitemaps": {}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"callable-lastmod-partial/index.xml",
views.index,
{"sitemaps": callable_lastmod_partial_sitemap},
),
path(
"callable-lastmod-partial/sitemap.xml",
views.sitemap,
{"sitemaps": callable_lastmod_partial_sitemap},
),
path(
"callable-lastmod-full/index.xml",
views.index,
{"sitemaps": callable_lastmod_full_sitemap},
),
path(
"callable-lastmod-full/sitemap.xml",
views.sitemap,
{"sitemaps": callable_lastmod_full_sitemap},
),
path(
"callable-lastmod-no-items/index.xml",
views.index,
{"sitemaps": callable_lastmod_no_items_sitemap},
),
path(
"generic-lastmod/index.xml",
views.index,
{"sitemaps": generic_sitemaps_lastmod},
name="django.contrib.sitemaps.views.index",
),
]
urlpatterns += i18n_patterns(
path("i18n/testmodel/<int:id>/", testmodelview, name="i18n_testmodel"),
)
|
81ed93c910a52e6212de7165bd968bc49044b0bab89b6ccb69e63a8db4524246 | import os
import re
import tempfile
import threading
import unittest
from pathlib import Path
from unittest import mock
from django.db import NotSupportedError, connection, transaction
from django.db.models import Aggregate, Avg, CharField, StdDev, Sum, Variance
from django.db.utils import ConnectionHandler
from django.test import (
TestCase,
TransactionTestCase,
override_settings,
skipIfDBFeature,
)
from django.test.utils import isolate_apps
from ..models import Author, Item, Object, Square
@unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
class Tests(TestCase):
longMessage = True
def test_aggregation(self):
"""Raise NotSupportedError when aggregating on date/time fields."""
for aggregate in (Sum, Avg, Variance, StdDev):
with self.assertRaises(NotSupportedError):
Item.objects.aggregate(aggregate("time"))
with self.assertRaises(NotSupportedError):
Item.objects.aggregate(aggregate("date"))
with self.assertRaises(NotSupportedError):
Item.objects.aggregate(aggregate("last_modified"))
with self.assertRaises(NotSupportedError):
Item.objects.aggregate(
**{
"complex": aggregate("last_modified")
+ aggregate("last_modified")
}
)
def test_distinct_aggregation(self):
class DistinctAggregate(Aggregate):
allow_distinct = True
aggregate = DistinctAggregate("first", "second", distinct=True)
msg = (
"SQLite doesn't support DISTINCT on aggregate functions accepting "
"multiple arguments."
)
with self.assertRaisesMessage(NotSupportedError, msg):
connection.ops.check_expression_support(aggregate)
def test_distinct_aggregation_multiple_args_no_distinct(self):
# Aggregate functions accept multiple arguments when DISTINCT isn't
# used, e.g. GROUP_CONCAT().
class DistinctAggregate(Aggregate):
allow_distinct = True
aggregate = DistinctAggregate("first", "second", distinct=False)
connection.ops.check_expression_support(aggregate)
def test_memory_db_test_name(self):
"""A named in-memory db should be allowed where supported."""
from django.db.backends.sqlite3.base import DatabaseWrapper
settings_dict = {
"TEST": {
"NAME": "file:memorydb_test?mode=memory&cache=shared",
}
}
creation = DatabaseWrapper(settings_dict).creation
self.assertEqual(
creation._get_test_db_name(),
creation.connection.settings_dict["TEST"]["NAME"],
)
def test_regexp_function(self):
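        # Each tuple is (string, pattern, expected). A NULL operand makes the
        # REGEXP function return NULL, and SQLite reports booleans as 0/1,
        # hence the normalization to bool below.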
tests = (
("test", r"[0-9]+", False),
("test", r"[a-z]+", True),
("test", None, None),
(None, r"[a-z]+", None),
(None, None, None),
)
for string, pattern, expected in tests:
with self.subTest((string, pattern)):
with connection.cursor() as cursor:
cursor.execute("SELECT %s REGEXP %s", [string, pattern])
value = cursor.fetchone()[0]
value = bool(value) if value in {0, 1} else value
self.assertIs(value, expected)
def test_pathlib_name(self):
with tempfile.TemporaryDirectory() as tmp:
settings_dict = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": Path(tmp) / "test.db",
},
}
connections = ConnectionHandler(settings_dict)
connections["default"].ensure_connection()
connections["default"].close()
self.assertTrue(os.path.isfile(os.path.join(tmp, "test.db")))
@mock.patch.object(connection, "get_database_version", return_value=(3, 20))
def test_check_database_version_supported(self, mocked_get_database_version):
msg = "SQLite 3.21 or later is required (found 3.20)."
with self.assertRaisesMessage(NotSupportedError, msg):
connection.check_database_version_supported()
self.assertTrue(mocked_get_database_version.called)
@unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
@isolate_apps("backends")
class SchemaTests(TransactionTestCase):
available_apps = ["backends"]
def test_autoincrement(self):
"""
auto_increment fields are created with the AUTOINCREMENT keyword
in order to be monotonically increasing (#10164).
"""
with connection.schema_editor(collect_sql=True) as editor:
editor.create_model(Square)
statements = editor.collected_sql
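        # The first collected statement is the CREATE TABLE for Square.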
match = re.search('"id" ([^,]+),', statements[0])
self.assertIsNotNone(match)
self.assertEqual(
"integer NOT NULL PRIMARY KEY AUTOINCREMENT",
match[1],
"Wrong SQL used to create an auto-increment column on SQLite",
)
def test_disable_constraint_checking_failure_disallowed(self):
"""
SQLite schema editor is not usable within an outer transaction if
foreign key constraint checks are not disabled beforehand.
"""
msg = (
"SQLite schema editor cannot be used while foreign key "
"constraint checks are enabled. Make sure to disable them "
"before entering a transaction.atomic() context because "
"SQLite does not support disabling them in the middle of "
"a multi-statement transaction."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic(), connection.schema_editor(atomic=True):
pass
def test_constraint_checks_disabled_atomic_allowed(self):
"""
SQLite schema editor is usable within an outer transaction as long as
foreign key constraints checks are disabled beforehand.
"""
def constraint_checks_enabled():
with connection.cursor() as cursor:
return bool(cursor.execute("PRAGMA foreign_keys").fetchone()[0])
with connection.constraint_checks_disabled(), transaction.atomic():
with connection.schema_editor(atomic=True):
self.assertFalse(constraint_checks_enabled())
self.assertFalse(constraint_checks_enabled())
self.assertTrue(constraint_checks_enabled())
@skipIfDBFeature("supports_atomic_references_rename")
def test_field_rename_inside_atomic_block(self):
"""
NotImplementedError is raised when a model field rename is attempted
inside an atomic block.
"""
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
msg = (
"Renaming the 'backends_author'.'name' column while in a "
"transaction is not supported on SQLite < 3.26 because it would "
"break referential integrity. Try adding `atomic = False` to the "
"Migration class."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor(atomic=True) as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
@skipIfDBFeature("supports_atomic_references_rename")
def test_table_rename_inside_atomic_block(self):
"""
NotImplementedError is raised when a table rename is attempted inside
an atomic block.
"""
msg = (
"Renaming the 'backends_author' table while in a transaction is "
"not supported on SQLite < 3.26 because it would break referential "
"integrity. Try adding `atomic = False` to the Migration class."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor(atomic=True) as editor:
editor.alter_db_table(Author, "backends_author", "renamed_table")
@unittest.skipUnless(connection.vendor == "sqlite", "Test only for SQLite")
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_no_interpolation(self):
# This shouldn't raise an exception (#17158)
query = "SELECT strftime('%Y', 'now');"
with connection.cursor() as cursor:
cursor.execute(query)
self.assertEqual(connection.queries[-1]["sql"], query)
def test_parameter_quoting(self):
        # The implementation of last_executed_query isn't optimal. It's
        # worth testing that parameters are quoted (#14091).
query = "SELECT %s"
params = ["\"'\\"]
with connection.cursor() as cursor:
cursor.execute(query, params)
# Note that the single quote is repeated
substituted = "SELECT '\"''\\'"
self.assertEqual(connection.queries[-1]["sql"], substituted)
def test_large_number_of_parameters(self):
# If SQLITE_MAX_VARIABLE_NUMBER (default = 999) has been changed to be
# greater than SQLITE_MAX_COLUMN (default = 2000), last_executed_query
# can hit the SQLITE_MAX_COLUMN limit (#26063).
with connection.cursor() as cursor:
sql = "SELECT MAX(%s)" % ", ".join(["%s"] * 2001)
params = list(range(2001))
# This should not raise an exception.
cursor.db.ops.last_executed_query(cursor.cursor, sql, params)
@unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
def test_parameter_escaping(self):
# '%s' escaping support for sqlite3 (#13648).
with connection.cursor() as cursor:
cursor.execute("select strftime('%s', date('now'))")
response = cursor.fetchall()[0][0]
            # response should be a non-zero integer
self.assertTrue(int(response))
@unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
@unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
class ThreadSharing(TransactionTestCase):
available_apps = ["backends"]
def test_database_sharing_in_threads(self):
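        # Objects created in a separate thread must be visible from the main
        # thread, i.e. the SQLite connection is shareable across threads.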
def create_object():
Object.objects.create()
create_object()
thread = threading.Thread(target=create_object)
thread.start()
thread.join()
self.assertEqual(Object.objects.count(), 2)
|
aa157505c867388f55c72c91e68492f2f763a67d829c2669e053006552fd48cb | import copy
import unittest
from io import StringIO
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.db import (
DEFAULT_DB_ALIAS,
DatabaseError,
NotSupportedError,
connection,
connections,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.test import TestCase, override_settings
try:
from django.db.backends.postgresql.psycopg_any import errors, is_psycopg3
except ImportError:
is_psycopg3 = False
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL tests")
class Tests(TestCase):
databases = {"default", "other"}
def test_nodb_cursor(self):
"""
        _nodb_cursor() falls back to the default connection's database when
        access to the 'postgres' database is not granted.
"""
orig_connect = BaseDatabaseWrapper.connect
def mocked_connect(self):
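            # Fail for connections without a NAME (the _nodb_cursor
            # connection) to simulate the 'postgres' database being
            # unavailable.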
if self.settings_dict["NAME"] is None:
raise DatabaseError()
return orig_connect(self)
with connection._nodb_cursor() as cursor:
self.assertIs(cursor.closed, False)
self.assertIsNotNone(cursor.db.connection)
self.assertIsNone(cursor.db.settings_dict["NAME"])
self.assertIs(cursor.closed, True)
self.assertIsNone(cursor.db.connection)
# Now assume the 'postgres' db isn't available
msg = (
"Normally Django will use a connection to the 'postgres' database "
"to avoid running initialization queries against the production "
"database when it's not needed (for example, when running tests). "
"Django was unable to create a connection to the 'postgres' "
"database and will use the first PostgreSQL database instead."
)
with self.assertWarnsMessage(RuntimeWarning, msg):
with mock.patch(
"django.db.backends.base.base.BaseDatabaseWrapper.connect",
side_effect=mocked_connect,
autospec=True,
):
with mock.patch.object(
connection,
"settings_dict",
{**connection.settings_dict, "NAME": "postgres"},
):
with connection._nodb_cursor() as cursor:
self.assertIs(cursor.closed, False)
self.assertIsNotNone(cursor.db.connection)
self.assertIs(cursor.closed, True)
self.assertIsNone(cursor.db.connection)
self.assertIsNotNone(cursor.db.settings_dict["NAME"])
self.assertEqual(
cursor.db.settings_dict["NAME"], connections["other"].settings_dict["NAME"]
)
# Cursor is yielded only for the first PostgreSQL database.
with self.assertWarnsMessage(RuntimeWarning, msg):
with mock.patch(
"django.db.backends.base.base.BaseDatabaseWrapper.connect",
side_effect=mocked_connect,
autospec=True,
):
with connection._nodb_cursor() as cursor:
self.assertIs(cursor.closed, False)
self.assertIsNotNone(cursor.db.connection)
def test_nodb_cursor_raises_postgres_authentication_failure(self):
"""
        _nodb_cursor() re-raises authentication failures to the 'postgres' db
        when no other connection to a PostgreSQL database is available.
"""
def mocked_connect(self):
raise DatabaseError()
def mocked_all(self):
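            # Report a single connection pointing at the 'postgres' database
            # so that no fallback candidate exists.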
test_connection = copy.copy(connections[DEFAULT_DB_ALIAS])
test_connection.settings_dict = copy.deepcopy(connection.settings_dict)
test_connection.settings_dict["NAME"] = "postgres"
return [test_connection]
msg = (
"Normally Django will use a connection to the 'postgres' database "
"to avoid running initialization queries against the production "
"database when it's not needed (for example, when running tests). "
"Django was unable to create a connection to the 'postgres' "
"database and will use the first PostgreSQL database instead."
)
with self.assertWarnsMessage(RuntimeWarning, msg):
mocker_connections_all = mock.patch(
"django.utils.connection.BaseConnectionHandler.all",
side_effect=mocked_all,
autospec=True,
)
mocker_connect = mock.patch(
"django.db.backends.base.base.BaseDatabaseWrapper.connect",
side_effect=mocked_connect,
autospec=True,
)
with mocker_connections_all, mocker_connect:
with self.assertRaises(DatabaseError):
with connection._nodb_cursor():
pass
def test_nodb_cursor_reraise_exceptions(self):
with self.assertRaisesMessage(DatabaseError, "exception"):
with connection._nodb_cursor():
raise DatabaseError("exception")
def test_database_name_too_long(self):
from django.db.backends.postgresql.base import DatabaseWrapper
settings = connection.settings_dict.copy()
max_name_length = connection.ops.max_name_length()
settings["NAME"] = "a" + (max_name_length * "a")
msg = (
"The database name '%s' (%d characters) is longer than "
"PostgreSQL's limit of %s characters. Supply a shorter NAME in "
"settings.DATABASES."
) % (settings["NAME"], max_name_length + 1, max_name_length)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
DatabaseWrapper(settings).get_connection_params()
def test_database_name_empty(self):
from django.db.backends.postgresql.base import DatabaseWrapper
settings = connection.settings_dict.copy()
settings["NAME"] = ""
msg = (
"settings.DATABASES is improperly configured. Please supply the "
"NAME or OPTIONS['service'] value."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
DatabaseWrapper(settings).get_connection_params()
def test_service_name(self):
from django.db.backends.postgresql.base import DatabaseWrapper
settings = connection.settings_dict.copy()
settings["OPTIONS"] = {"service": "my_service"}
settings["NAME"] = ""
params = DatabaseWrapper(settings).get_connection_params()
self.assertEqual(params["service"], "my_service")
self.assertNotIn("database", params)
def test_service_name_default_db(self):
# None is used to connect to the default 'postgres' db.
from django.db.backends.postgresql.base import DatabaseWrapper
settings = connection.settings_dict.copy()
settings["NAME"] = None
settings["OPTIONS"] = {"service": "django_test"}
params = DatabaseWrapper(settings).get_connection_params()
self.assertEqual(params["dbname"], "postgres")
self.assertNotIn("service", params)
def test_connect_and_rollback(self):
"""
PostgreSQL shouldn't roll back SET TIME ZONE, even if the first
transaction is rolled back (#17062).
"""
new_connection = connection.copy()
try:
            # Ensure the database default time zone is different from the
            # time zone in new_connection.settings_dict. We can get the
            # default time zone by reset & show.
with new_connection.cursor() as cursor:
cursor.execute("RESET TIMEZONE")
cursor.execute("SHOW TIMEZONE")
db_default_tz = cursor.fetchone()[0]
new_tz = "Europe/Paris" if db_default_tz == "UTC" else "UTC"
new_connection.close()
# Invalidate timezone name cache, because the setting_changed
# handler cannot know about new_connection.
del new_connection.timezone_name
# Fetch a new connection with the new_tz as default
# time zone, run a query and rollback.
with self.settings(TIME_ZONE=new_tz):
new_connection.set_autocommit(False)
new_connection.rollback()
# Now let's see if the rollback rolled back the SET TIME ZONE.
with new_connection.cursor() as cursor:
cursor.execute("SHOW TIMEZONE")
tz = cursor.fetchone()[0]
self.assertEqual(new_tz, tz)
finally:
new_connection.close()
def test_connect_non_autocommit(self):
"""
The connection wrapper shouldn't believe that autocommit is enabled
after setting the time zone when AUTOCOMMIT is False (#21452).
"""
new_connection = connection.copy()
new_connection.settings_dict["AUTOCOMMIT"] = False
try:
# Open a database connection.
with new_connection.cursor():
self.assertFalse(new_connection.get_autocommit())
finally:
new_connection.close()
def test_connect_isolation_level(self):
"""
        The transaction isolation level can be configured with
        DATABASES['OPTIONS']['isolation_level'].
"""
from django.db.backends.postgresql.psycopg_any import IsolationLevel
# Since this is a django.test.TestCase, a transaction is in progress
# and the isolation level isn't reported as 0. This test assumes that
# PostgreSQL is configured with the default isolation level.
# Check the level on the psycopg connection, not the Django wrapper.
self.assertIsNone(connection.connection.isolation_level)
new_connection = connection.copy()
new_connection.settings_dict["OPTIONS"][
"isolation_level"
] = IsolationLevel.SERIALIZABLE
try:
# Start a transaction so the isolation level isn't reported as 0.
new_connection.set_autocommit(False)
# Check the level on the psycopg connection, not the Django wrapper.
self.assertEqual(
new_connection.connection.isolation_level,
IsolationLevel.SERIALIZABLE,
)
finally:
new_connection.close()
def test_connect_invalid_isolation_level(self):
self.assertIsNone(connection.connection.isolation_level)
new_connection = connection.copy()
new_connection.settings_dict["OPTIONS"]["isolation_level"] = -1
msg = (
"Invalid transaction isolation level -1 specified. Use one of the "
"psycopg.IsolationLevel values."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
new_connection.ensure_connection()
def test_connect_role(self):
"""
The session role can be configured with DATABASES
["OPTIONS"]["assume_role"].
"""
        custom_role = "django_nonexistent_role"
        new_connection = connection.copy()
        try:
            new_connection.settings_dict["OPTIONS"]["assume_role"] = custom_role
msg = f'role "{custom_role}" does not exist'
with self.assertRaisesMessage(errors.InvalidParameterValue, msg):
new_connection.connect()
finally:
new_connection.close()
def test_connect_no_is_usable_checks(self):
new_connection = connection.copy()
try:
with mock.patch.object(new_connection, "is_usable") as is_usable:
new_connection.connect()
is_usable.assert_not_called()
finally:
new_connection.close()
def _select(self, val):
with connection.cursor() as cursor:
cursor.execute("SELECT %s::text[]", (val,))
return cursor.fetchone()[0]
def test_select_ascii_array(self):
a = ["awef"]
b = self._select(a)
self.assertEqual(a[0], b[0])
def test_select_unicode_array(self):
a = ["ᄲawef"]
b = self._select(a)
self.assertEqual(a[0], b[0])
def test_lookup_cast(self):
from django.db.backends.postgresql.operations import DatabaseOperations
do = DatabaseOperations(connection=None)
lookups = (
"iexact",
"contains",
"icontains",
"startswith",
"istartswith",
"endswith",
"iendswith",
"regex",
"iregex",
)
for lookup in lookups:
with self.subTest(lookup=lookup):
self.assertIn("::text", do.lookup_cast(lookup))
# RemovedInDjango51Warning.
for lookup in lookups:
for field_type in ("CICharField", "CIEmailField", "CITextField"):
with self.subTest(lookup=lookup, field_type=field_type):
self.assertIn(
"::citext", do.lookup_cast(lookup, internal_type=field_type)
)
def test_correct_extraction_psycopg_version(self):
from django.db.backends.postgresql.base import Database, psycopg_version
with mock.patch.object(Database, "__version__", "4.2.1 (dt dec pq3 ext lo64)"):
self.assertEqual(psycopg_version(), (4, 2, 1))
with mock.patch.object(
Database, "__version__", "4.2b0.dev1 (dt dec pq3 ext lo64)"
):
self.assertEqual(psycopg_version(), (4, 2))
@override_settings(DEBUG=True)
@unittest.skipIf(is_psycopg3, "psycopg2 specific test")
def test_copy_to_expert_cursors(self):
out = StringIO()
copy_expert_sql = "COPY django_session TO STDOUT (FORMAT CSV, HEADER)"
with connection.cursor() as cursor:
cursor.copy_expert(copy_expert_sql, out)
cursor.copy_to(out, "django_session")
self.assertEqual(
[q["sql"] for q in connection.queries],
[copy_expert_sql, "COPY django_session TO STDOUT"],
)
@override_settings(DEBUG=True)
@unittest.skipUnless(is_psycopg3, "psycopg3 specific test")
def test_copy_cursors(self):
copy_sql = "COPY django_session TO STDOUT (FORMAT CSV, HEADER)"
with connection.cursor() as cursor:
with cursor.copy(copy_sql) as copy:
for row in copy:
pass
self.assertEqual([q["sql"] for q in connection.queries], [copy_sql])
def test_get_database_version(self):
new_connection = connection.copy()
new_connection.pg_version = 110009
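        # pg_version encodes the server version as major * 10000 + minor.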
self.assertEqual(new_connection.get_database_version(), (11, 9))
@mock.patch.object(connection, "get_database_version", return_value=(11,))
def test_check_database_version_supported(self, mocked_get_database_version):
msg = "PostgreSQL 12 or later is required (found 11)."
with self.assertRaisesMessage(NotSupportedError, msg):
connection.check_database_version_supported()
self.assertTrue(mocked_get_database_version.called)
|
0a3232924205001c55b30ee31fb7f0d0eae96e42ed5f7adad145438a08c6201c | import unittest
from contextlib import contextmanager
from io import StringIO
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.db import DatabaseError, connection
from django.db.backends.base.creation import BaseDatabaseCreation
from django.test import SimpleTestCase
try:
from django.db.backends.postgresql.psycopg_any import errors
except ImportError:
pass
else:
from django.db.backends.postgresql.creation import DatabaseCreation
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL tests")
class DatabaseCreationTests(SimpleTestCase):
@contextmanager
def changed_test_settings(self, **kwargs):
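        # Temporarily override keys in settings_dict["TEST"], restoring or
        # removing them on exit.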
settings = connection.settings_dict["TEST"]
saved_values = {}
for name in kwargs:
if name in settings:
saved_values[name] = settings[name]
for name, value in kwargs.items():
settings[name] = value
try:
yield
finally:
for name in kwargs:
if name in saved_values:
settings[name] = saved_values[name]
else:
del settings[name]
def check_sql_table_creation_suffix(self, settings, expected):
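        # Apply the given TEST settings and compare the generated CREATE
        # DATABASE suffix with the expected SQL fragment.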
with self.changed_test_settings(**settings):
creation = DatabaseCreation(connection)
suffix = creation.sql_table_creation_suffix()
self.assertEqual(suffix, expected)
def test_sql_table_creation_suffix_with_none_settings(self):
settings = {"CHARSET": None, "TEMPLATE": None}
self.check_sql_table_creation_suffix(settings, "")
def test_sql_table_creation_suffix_with_encoding(self):
settings = {"CHARSET": "UTF8"}
self.check_sql_table_creation_suffix(settings, "WITH ENCODING 'UTF8'")
def test_sql_table_creation_suffix_with_template(self):
settings = {"TEMPLATE": "template0"}
self.check_sql_table_creation_suffix(settings, 'WITH TEMPLATE "template0"')
def test_sql_table_creation_suffix_with_encoding_and_template(self):
settings = {"CHARSET": "UTF8", "TEMPLATE": "template0"}
self.check_sql_table_creation_suffix(
settings, '''WITH ENCODING 'UTF8' TEMPLATE "template0"'''
)
def test_sql_table_creation_raises_with_collation(self):
settings = {"COLLATION": "test"}
msg = (
"PostgreSQL does not support collation setting at database "
"creation time."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.check_sql_table_creation_suffix(settings, None)
def _execute_raise_database_already_exists(self, cursor, parameters, keepdb=False):
error = errors.DuplicateDatabase(
"database %s already exists" % parameters["dbname"]
)
raise DatabaseError() from error
def _execute_raise_permission_denied(self, cursor, parameters, keepdb=False):
error = errors.InsufficientPrivilege("permission denied to create database")
raise DatabaseError() from error
def patch_test_db_creation(self, execute_create_test_db):
return mock.patch.object(
BaseDatabaseCreation, "_execute_create_test_db", execute_create_test_db
)
@mock.patch("sys.stdout", new_callable=StringIO)
@mock.patch("sys.stderr", new_callable=StringIO)
def test_create_test_db(self, *mocked_objects):
creation = DatabaseCreation(connection)
# Simulate test database creation raising "database already exists"
with self.patch_test_db_creation(self._execute_raise_database_already_exists):
with mock.patch("builtins.input", return_value="no"):
with self.assertRaises(SystemExit):
# SystemExit is raised if the user answers "no" to the
# prompt asking if it's okay to delete the test database.
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=False
)
# "Database already exists" error is ignored when keepdb is on
creation._create_test_db(verbosity=0, autoclobber=False, keepdb=True)
# Simulate test database creation raising unexpected error
with self.patch_test_db_creation(self._execute_raise_permission_denied):
with mock.patch.object(
DatabaseCreation, "_database_exists", return_value=False
):
with self.assertRaises(SystemExit):
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=False
)
with self.assertRaises(SystemExit):
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=True
)
# Simulate test database creation raising "insufficient privileges".
# An error shouldn't appear when keepdb is on and the database already
# exists.
with self.patch_test_db_creation(self._execute_raise_permission_denied):
with mock.patch.object(
DatabaseCreation, "_database_exists", return_value=True
):
creation._create_test_db(verbosity=0, autoclobber=False, keepdb=True)
|
671ac94bbe6f8b7b773dd62213d63c4f40d564f1df5c5eb7bb07c6a0196de8d2 | import json
import math
import re
from decimal import Decimal
from django.contrib.gis.db.models import GeometryField, PolygonField, functions
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon, fromstr
from django.contrib.gis.measure import Area
from django.db import NotSupportedError, connection
from django.db.models import IntegerField, Sum, Value
from django.test import TestCase, skipUnlessDBFeature
from ..utils import FuncTestMixin
from .models import City, Country, CountryWebMercator, State, Track
class GISFunctionsTests(FuncTestMixin, TestCase):
"""
Testing functions from django/contrib/gis/db/models/functions.py.
Area/Distance/Length/Perimeter are tested in distapp/tests.
    Please keep the tests in alphabetical order by function name.
"""
fixtures = ["initial"]
def test_asgeojson(self):
if not connection.features.has_AsGeoJSON_function:
with self.assertRaises(NotSupportedError):
list(Country.objects.annotate(json=functions.AsGeoJSON("mpoly")))
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = json.loads(
'{"type":"Point","crs":{"type":"name","properties":'
'{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
)
victoria_json = json.loads(
'{"type":"Point",'
'"bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],'
'"coordinates":[-123.305196,48.462611]}'
)
chicago_json = json.loads(
'{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},'
'"bbox":[-87.65018,41.85039,-87.65018,41.85039],'
'"coordinates":[-87.65018,41.85039]}'
)
if "crs" in connection.features.unsupported_geojson_options:
del houston_json["crs"]
del chicago_json["crs"]
if "bbox" in connection.features.unsupported_geojson_options:
del chicago_json["bbox"]
del victoria_json["bbox"]
if "precision" in connection.features.unsupported_geojson_options:
chicago_json["coordinates"] = [-87.650175, 41.850385]
        # The precision argument must be an integer.
with self.assertRaises(TypeError):
City.objects.annotate(geojson=functions.AsGeoJSON("point", precision="foo"))
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0)
# FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertJSONEqual(
pueblo_json,
City.objects.annotate(geojson=functions.AsGeoJSON("point"))
.get(name="Pueblo")
.geojson,
)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertJSONEqual(
City.objects.annotate(json=functions.AsGeoJSON("point", crs=True))
.get(name="Houston")
.json,
houston_json,
)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertJSONEqual(
City.objects.annotate(geojson=functions.AsGeoJSON("point", bbox=True))
.get(name="Victoria")
.geojson,
victoria_json,
)
# SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
# MariaDB doesn't limit the number of decimals in bbox.
if connection.ops.mariadb:
chicago_json["bbox"] = [-87.650175, 41.850385, -87.650175, 41.850385]
try:
self.assertJSONEqual(
City.objects.annotate(
geojson=functions.AsGeoJSON(
"point", bbox=True, crs=True, precision=5
)
)
.get(name="Chicago")
.geojson,
chicago_json,
)
except AssertionError:
            # Give it a second chance with different coordinate rounding.
chicago_json["coordinates"][1] = 41.85038
self.assertJSONEqual(
City.objects.annotate(
geojson=functions.AsGeoJSON(
"point", bbox=True, crs=True, precision=5
)
)
.get(name="Chicago")
.geojson,
chicago_json,
)
@skipUnlessDBFeature("has_AsGML_function")
def test_asgml(self):
# Should throw a TypeError when trying to obtain GML from a
# non-geometry field.
qs = City.objects.all()
with self.assertRaises(TypeError):
qs.annotate(gml=functions.AsGML("name"))
ptown = City.objects.annotate(gml=functions.AsGML("point", precision=9)).get(
name="Pueblo"
)
if connection.ops.oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326" '
r'xmlns:gml="http://www.opengis.net/gml">'
r'<gml:coordinates decimal="\." cs="," ts=" ">'
r"-104.60925\d+,38.25500\d+ "
r"</gml:coordinates></gml:Point>"
)
else:
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
r"-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>"
)
self.assertTrue(gml_regex.match(ptown.gml))
self.assertIn(
'<gml:pos srsDimension="2">',
City.objects.annotate(gml=functions.AsGML("point", version=3))
.get(name="Pueblo")
.gml,
)
@skipUnlessDBFeature("has_AsKML_function")
def test_askml(self):
# Should throw a TypeError when trying to obtain KML from a
# non-geometry field.
with self.assertRaises(TypeError):
City.objects.annotate(kml=functions.AsKML("name"))
# Ensuring the KML is as expected.
ptown = City.objects.annotate(kml=functions.AsKML("point", precision=9)).get(
name="Pueblo"
)
self.assertEqual(
"<Point><coordinates>-104.609252,38.255001</coordinates></Point>", ptown.kml
)
@skipUnlessDBFeature("has_AsSVG_function")
def test_assvg(self):
with self.assertRaises(TypeError):
City.objects.annotate(svg=functions.AsSVG("point", precision="foo"))
# SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
svg1 = 'cx="-104.609252" cy="-38.255001"'
        # Even though the output is relative, there's only one point, so it's
        # practically the same except for the 'c' letter prefix on the x,y
        # values.
svg2 = svg1.replace("c", "")
self.assertEqual(
svg1,
City.objects.annotate(svg=functions.AsSVG("point")).get(name="Pueblo").svg,
)
self.assertEqual(
svg2,
City.objects.annotate(svg=functions.AsSVG("point", relative=5))
.get(name="Pueblo")
.svg,
)
@skipUnlessDBFeature("has_AsWKB_function")
def test_aswkb(self):
wkb = (
City.objects.annotate(
wkb=functions.AsWKB(Point(1, 2, srid=4326)),
)
.first()
.wkb
)
# WKB is either XDR or NDR encoded.
self.assertIn(
bytes(wkb),
(
b"\x00\x00\x00\x00\x01?\xf0\x00\x00\x00\x00\x00\x00@\x00\x00"
b"\x00\x00\x00\x00\x00",
b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0?\x00\x00"
b"\x00\x00\x00\x00\x00@",
),
)
@skipUnlessDBFeature("has_AsWKT_function")
def test_aswkt(self):
wkt = (
City.objects.annotate(
wkt=functions.AsWKT(Point(1, 2, srid=4326)),
)
.first()
.wkt
)
self.assertEqual(
wkt, "POINT (1.0 2.0)" if connection.ops.oracle else "POINT(1 2)"
)
@skipUnlessDBFeature("has_Azimuth_function")
def test_azimuth(self):
# Returns the azimuth in radians.
azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(1, 1, srid=4326))
self.assertAlmostEqual(
City.objects.annotate(azimuth=azimuth_expr).first().azimuth,
math.pi / 4,
places=2,
)
# Returns None if the two points are coincident.
azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(0, 0, srid=4326))
self.assertIsNone(City.objects.annotate(azimuth=azimuth_expr).first().azimuth)
@skipUnlessDBFeature("has_BoundingCircle_function")
def test_bounding_circle(self):
def circle_num_points(num_seg):
# num_seg is the number of segments per quarter circle.
return (4 * num_seg) + 1
expected_areas = (169, 136) if connection.ops.postgis else (171, 126)
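        # Expected areas differ because PostGIS and other backends compute
        # the bounding circle with different algorithms.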
qs = Country.objects.annotate(
circle=functions.BoundingCircle("mpoly")
).order_by("name")
self.assertAlmostEqual(qs[0].circle.area, expected_areas[0], 0)
self.assertAlmostEqual(qs[1].circle.area, expected_areas[1], 0)
if connection.ops.postgis:
# By default num_seg=48.
self.assertEqual(qs[0].circle.num_points, circle_num_points(48))
self.assertEqual(qs[1].circle.num_points, circle_num_points(48))
tests = [12, Value(12, output_field=IntegerField())]
for num_seq in tests:
with self.subTest(num_seq=num_seq):
qs = Country.objects.annotate(
circle=functions.BoundingCircle("mpoly", num_seg=num_seq),
).order_by("name")
if connection.ops.postgis:
                    self.assertGreater(qs[0].circle.area, 168.4)
                    self.assertLess(qs[0].circle.area, 169.5)
self.assertAlmostEqual(qs[1].circle.area, 136, 0)
self.assertEqual(qs[0].circle.num_points, circle_num_points(12))
self.assertEqual(qs[1].circle.num_points, circle_num_points(12))
else:
self.assertAlmostEqual(qs[0].circle.area, expected_areas[0], 0)
self.assertAlmostEqual(qs[1].circle.area, expected_areas[1], 0)
@skipUnlessDBFeature("has_Centroid_function")
def test_centroid(self):
qs = State.objects.exclude(poly__isnull=True).annotate(
centroid=functions.Centroid("poly")
)
tol = (
1.8 if connection.ops.mysql else (0.1 if connection.ops.oracle else 0.00001)
)
for state in qs:
self.assertTrue(state.poly.centroid.equals_exact(state.centroid, tol))
with self.assertRaisesMessage(
TypeError, "'Centroid' takes exactly 1 argument (2 given)"
):
State.objects.annotate(centroid=functions.Centroid("poly", "poly"))
@skipUnlessDBFeature("has_Difference_function")
def test_difference(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(diff=functions.Difference("mpoly", geom))
# Oracle does something screwy with the Texas geometry.
if connection.ops.oracle:
qs = qs.exclude(name="Texas")
for c in qs:
self.assertTrue(c.mpoly.difference(geom).equals(c.diff))
@skipUnlessDBFeature("has_Difference_function", "has_Transform_function")
def test_difference_mixed_srid(self):
"""Testing with mixed SRID (Country has default 4326)."""
geom = Point(556597.4, 2632018.6, srid=3857) # Spherical Mercator
qs = Country.objects.annotate(difference=functions.Difference("mpoly", geom))
# Oracle does something screwy with the Texas geometry.
if connection.ops.oracle:
qs = qs.exclude(name="Texas")
for c in qs:
self.assertTrue(c.mpoly.difference(geom).equals(c.difference))
@skipUnlessDBFeature("has_Envelope_function")
def test_envelope(self):
countries = Country.objects.annotate(envelope=functions.Envelope("mpoly"))
for country in countries:
self.assertTrue(country.envelope.equals(country.mpoly.envelope))
@skipUnlessDBFeature("has_ForcePolygonCW_function")
def test_force_polygon_cw(self):
rings = (
((0, 0), (5, 0), (0, 5), (0, 0)),
((1, 1), (1, 3), (3, 1), (1, 1)),
)
rhr_rings = (
((0, 0), (0, 5), (5, 0), (0, 0)),
((1, 1), (3, 1), (1, 3), (1, 1)),
)
State.objects.create(name="Foo", poly=Polygon(*rings))
st = State.objects.annotate(
force_polygon_cw=functions.ForcePolygonCW("poly")
).get(name="Foo")
self.assertEqual(rhr_rings, st.force_polygon_cw.coords)
@skipUnlessDBFeature("has_GeoHash_function")
def test_geohash(self):
# Reference query:
# SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
# SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
ref_hash = "9vk1mfq8jx0c8e0386z6"
h1 = City.objects.annotate(geohash=functions.GeoHash("point")).get(
name="Houston"
)
        h2 = City.objects.annotate(
            geohash=functions.GeoHash("point", precision=5)
        ).get(name="Houston")
self.assertEqual(ref_hash, h1.geohash[: len(ref_hash)])
self.assertEqual(ref_hash[:5], h2.geohash)
@skipUnlessDBFeature("has_GeometryDistance_function")
def test_geometry_distance(self):
point = Point(-90, 40, srid=4326)
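        # Both operands use SRID 4326, so distances are expressed in degrees.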
qs = City.objects.annotate(
distance=functions.GeometryDistance("point", point)
).order_by("distance")
distances = (
2.99091995527296,
5.33507274054713,
9.33852187483721,
9.91769193646233,
11.556465744884,
14.713098433352,
34.3635252198568,
276.987855073372,
)
for city, expected_distance in zip(qs, distances):
with self.subTest(city=city):
self.assertAlmostEqual(city.distance, expected_distance)
@skipUnlessDBFeature("has_Intersection_function")
def test_intersection(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(inter=functions.Intersection("mpoly", geom))
for c in qs:
if connection.features.empty_intersection_returns_none:
self.assertIsNone(c.inter)
else:
self.assertIs(c.inter.empty, True)
@skipUnlessDBFeature("supports_empty_geometries", "has_IsEmpty_function")
def test_isempty(self):
empty = City.objects.create(name="Nowhere", point=Point(srid=4326))
City.objects.create(name="Somewhere", point=Point(6.825, 47.1, srid=4326))
self.assertSequenceEqual(
City.objects.annotate(isempty=functions.IsEmpty("point")).filter(
isempty=True
),
[empty],
)
self.assertSequenceEqual(City.objects.filter(point__isempty=True), [empty])
@skipUnlessDBFeature("has_IsValid_function")
def test_isvalid(self):
valid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))")
invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))")
State.objects.create(name="valid", poly=valid_geom)
State.objects.create(name="invalid", poly=invalid_geom)
valid = (
State.objects.filter(name="valid")
.annotate(isvalid=functions.IsValid("poly"))
.first()
)
invalid = (
State.objects.filter(name="invalid")
.annotate(isvalid=functions.IsValid("poly"))
.first()
)
self.assertIs(valid.isvalid, True)
self.assertIs(invalid.isvalid, False)
@skipUnlessDBFeature("has_Area_function")
def test_area_with_regular_aggregate(self):
        # Create projected country objects so this test works on all backends.
for c in Country.objects.all():
CountryWebMercator.objects.create(
name=c.name, mpoly=c.mpoly.transform(3857, clone=True)
)
# Test in projected coordinate system
qs = CountryWebMercator.objects.annotate(area_sum=Sum(functions.Area("mpoly")))
# Some backends (e.g. Oracle) cannot group by multipolygon values, so
# defer such fields in the aggregation query.
for c in qs.defer("mpoly"):
result = c.area_sum
# If the result is a measure object, get value.
if isinstance(result, Area):
result = result.sq_m
self.assertAlmostEqual((result - c.mpoly.area) / c.mpoly.area, 0)
@skipUnlessDBFeature("has_Area_function")
def test_area_lookups(self):
# Create projected countries so the test works on all backends.
CountryWebMercator.objects.bulk_create(
CountryWebMercator(name=c.name, mpoly=c.mpoly.transform(3857, clone=True))
for c in Country.objects.all()
)
qs = CountryWebMercator.objects.annotate(area=functions.Area("mpoly"))
self.assertEqual(
qs.get(area__lt=Area(sq_km=500000)),
CountryWebMercator.objects.get(name="New Zealand"),
)
with self.assertRaisesMessage(
ValueError, "AreaField only accepts Area measurement objects."
):
qs.get(area__lt=500000)
@skipUnlessDBFeature("has_LineLocatePoint_function")
def test_line_locate_point(self):
pos_expr = functions.LineLocatePoint(
LineString((0, 0), (0, 3), srid=4326), Point(0, 1, srid=4326)
)
self.assertAlmostEqual(
State.objects.annotate(pos=pos_expr).first().pos, 0.3333333
)
@skipUnlessDBFeature("has_MakeValid_function")
def test_make_valid(self):
invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))")
State.objects.create(name="invalid", poly=invalid_geom)
invalid = (
State.objects.filter(name="invalid")
.annotate(repaired=functions.MakeValid("poly"))
.first()
)
self.assertIs(invalid.repaired.valid, True)
self.assertTrue(
invalid.repaired.equals(
fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))", srid=invalid.poly.srid)
)
)
@skipUnlessDBFeature("has_MakeValid_function")
def test_make_valid_multipolygon(self):
invalid_geom = fromstr(
"POLYGON((0 0, 0 1 , 1 1 , 1 0, 0 0), (10 0, 10 1, 11 1, 11 0, 10 0))"
)
State.objects.create(name="invalid", poly=invalid_geom)
invalid = (
State.objects.filter(name="invalid")
.annotate(
repaired=functions.MakeValid("poly"),
)
.get()
)
self.assertIs(invalid.repaired.valid, True)
self.assertTrue(
invalid.repaired.equals(
fromstr(
"MULTIPOLYGON (((0 0, 0 1, 1 1, 1 0, 0 0)), "
"((10 0, 10 1, 11 1, 11 0, 10 0)))",
srid=invalid.poly.srid,
)
)
)
self.assertEqual(len(invalid.repaired), 2)
@skipUnlessDBFeature("has_MakeValid_function")
def test_make_valid_output_field(self):
        # output_field is a GeometryField instance because different geometry
        # types can be returned.
output_field = functions.MakeValid(
Value(Polygon(), PolygonField(srid=42)),
).output_field
self.assertIs(output_field.__class__, GeometryField)
self.assertEqual(output_field.srid, 42)
@skipUnlessDBFeature("has_MemSize_function")
def test_memsize(self):
ptown = City.objects.annotate(size=functions.MemSize("point")).get(
name="Pueblo"
)
# Exact value depends on database and version.
self.assertTrue(20 <= ptown.size <= 105)
@skipUnlessDBFeature("has_NumGeom_function")
def test_num_geom(self):
        # Both countries have only two geometries.
for c in Country.objects.annotate(num_geom=functions.NumGeometries("mpoly")):
self.assertEqual(2, c.num_geom)
qs = City.objects.filter(point__isnull=False).annotate(
num_geom=functions.NumGeometries("point")
)
for city in qs:
            # The result for the number of geometries on non-collections
            # depends on the database.
if connection.ops.mysql or connection.ops.mariadb:
self.assertIsNone(city.num_geom)
else:
self.assertEqual(1, city.num_geom)
@skipUnlessDBFeature("has_NumPoint_function")
def test_num_points(self):
coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
Track.objects.create(name="Foo", line=LineString(coords))
qs = Track.objects.annotate(num_points=functions.NumPoints("line"))
self.assertEqual(qs.first().num_points, 2)
mpoly_qs = Country.objects.annotate(num_points=functions.NumPoints("mpoly"))
if not connection.features.supports_num_points_poly:
for c in mpoly_qs:
self.assertIsNone(c.num_points)
return
for c in mpoly_qs:
self.assertEqual(c.mpoly.num_points, c.num_points)
for c in City.objects.annotate(num_points=functions.NumPoints("point")):
self.assertEqual(c.num_points, 1)
@skipUnlessDBFeature("has_PointOnSurface_function")
def test_point_on_surface(self):
qs = Country.objects.annotate(
point_on_surface=functions.PointOnSurface("mpoly")
)
for country in qs:
self.assertTrue(country.mpoly.intersection(country.point_on_surface))
@skipUnlessDBFeature("has_Reverse_function")
def test_reverse_geom(self):
coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
Track.objects.create(name="Foo", line=LineString(coords))
track = Track.objects.annotate(reverse_geom=functions.Reverse("line")).get(
name="Foo"
)
coords.reverse()
self.assertEqual(tuple(coords), track.reverse_geom.coords)
@skipUnlessDBFeature("has_Scale_function")
def test_scale(self):
xfac, yfac = 2, 3
tol = 5 # The low precision tolerance is for SpatiaLite
qs = Country.objects.annotate(scaled=functions.Scale("mpoly", xfac, yfac))
for country in qs:
for p1, p2 in zip(country.mpoly, country.scaled):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
# Test float/Decimal values
qs = Country.objects.annotate(
scaled=functions.Scale("mpoly", 1.5, Decimal("2.5"))
)
self.assertGreater(qs[0].scaled.area, qs[0].mpoly.area)
@skipUnlessDBFeature("has_SnapToGrid_function")
def test_snap_to_grid(self):
# Let's try and break snap_to_grid() with bad combinations of arguments.
for bad_args in ((), range(3), range(5)):
with self.assertRaises(ValueError):
Country.objects.annotate(snap=functions.SnapToGrid("mpoly", *bad_args))
for bad_args in (("1.0",), (1.0, None), tuple(map(str, range(4)))):
with self.assertRaises(TypeError):
Country.objects.annotate(snap=functions.SnapToGrid("mpoly", *bad_args))
# Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
# from the world borders dataset he provides.
wkt = (
"MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,"
"12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,"
"12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,"
"12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,"
"12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,"
"12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,"
"12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,"
"12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))"
)
Country.objects.create(name="San Marino", mpoly=fromstr(wkt))
# Because floating-point arithmetic isn't exact, we set a tolerance
# to pass into GEOS `equals_exact`.
tol = 0.000000001
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1))
# FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr("MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))")
self.assertTrue(
ref.equals_exact(
Country.objects.annotate(snap=functions.SnapToGrid("mpoly", 0.1))
.get(name="San Marino")
.snap,
tol,
)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23))
# FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr(
"MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))"
)
self.assertTrue(
ref.equals_exact(
Country.objects.annotate(snap=functions.SnapToGrid("mpoly", 0.05, 0.23))
.get(name="San Marino")
.snap,
tol,
)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23))
# FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr(
"MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,"
"12.45 43.87,12.4 43.87)))"
)
self.assertTrue(
ref.equals_exact(
Country.objects.annotate(
snap=functions.SnapToGrid("mpoly", 0.05, 0.23, 0.5, 0.17)
)
.get(name="San Marino")
.snap,
tol,
)
)
@skipUnlessDBFeature("has_SymDifference_function")
def test_sym_difference(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(
sym_difference=functions.SymDifference("mpoly", geom)
)
# Oracle does something screwy with the Texas geometry.
if connection.ops.oracle:
qs = qs.exclude(name="Texas")
for country in qs:
self.assertTrue(
country.mpoly.sym_difference(geom).equals(country.sym_difference)
)
@skipUnlessDBFeature("has_Transform_function")
def test_transform(self):
        # Pre-transformed point for Pueblo.
ptown = fromstr("POINT(992363.390841912 481455.395105533)", srid=2774)
        # Assert that the result of the transform operation matches the
        # pre-transformed point.
h = City.objects.annotate(pt=functions.Transform("point", ptown.srid)).get(
name="Pueblo"
)
self.assertEqual(2774, h.pt.srid)
# Precision is low due to version variations in PROJ and GDAL.
self.assertLess(ptown.x - h.pt.x, 1)
self.assertLess(ptown.y - h.pt.y, 1)
@skipUnlessDBFeature("has_Translate_function")
def test_translate(self):
xfac, yfac = 5, -23
qs = Country.objects.annotate(
translated=functions.Translate("mpoly", xfac, yfac)
)
for c in qs:
for p1, p2 in zip(c.mpoly, c.translated):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
# The low precision is for SpatiaLite
self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
# Some combined function tests
@skipUnlessDBFeature(
"has_Difference_function",
"has_Intersection_function",
"has_SymDifference_function",
"has_Union_function",
)
def test_diff_intersection_union(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(
difference=functions.Difference("mpoly", geom),
sym_difference=functions.SymDifference("mpoly", geom),
union=functions.Union("mpoly", geom),
intersection=functions.Intersection("mpoly", geom),
)
if connection.ops.oracle:
# Should be able to execute the queries; however, they won't be the same
# as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
# SpatiaLite).
return
for c in qs:
self.assertTrue(c.mpoly.difference(geom).equals(c.difference))
if connection.features.empty_intersection_returns_none:
self.assertIsNone(c.intersection)
else:
self.assertIs(c.intersection.empty, True)
self.assertTrue(c.mpoly.sym_difference(geom).equals(c.sym_difference))
self.assertTrue(c.mpoly.union(geom).equals(c.union))
@skipUnlessDBFeature("has_Union_function")
def test_union(self):
"""Union with all combinations of geometries/geometry fields."""
geom = Point(-95.363151, 29.763374, srid=4326)
union = (
City.objects.annotate(union=functions.Union("point", geom))
.get(name="Dallas")
.union
)
expected = fromstr(
"MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)", srid=4326
)
self.assertTrue(expected.equals(union))
union = (
City.objects.annotate(union=functions.Union(geom, "point"))
.get(name="Dallas")
.union
)
self.assertTrue(expected.equals(union))
union = (
City.objects.annotate(union=functions.Union("point", "point"))
.get(name="Dallas")
.union
)
expected = GEOSGeometry("POINT(-96.801611 32.782057)", srid=4326)
self.assertTrue(expected.equals(union))
union = (
City.objects.annotate(union=functions.Union(geom, geom))
.get(name="Dallas")
.union
)
self.assertTrue(geom.equals(union))
@skipUnlessDBFeature("has_Union_function", "has_Transform_function")
def test_union_mixed_srid(self):
"""The result SRID depends on the order of parameters."""
geom = Point(61.42915, 55.15402, srid=4326)
geom_3857 = geom.transform(3857, clone=True)
tol = 0.001
for city in City.objects.annotate(union=functions.Union("point", geom_3857)):
expected = city.point | geom
self.assertTrue(city.union.equals_exact(expected, tol))
self.assertEqual(city.union.srid, 4326)
for city in City.objects.annotate(union=functions.Union(geom_3857, "point")):
expected = geom_3857 | city.point.transform(3857, clone=True)
self.assertTrue(expected.equals_exact(city.union, tol))
self.assertEqual(city.union.srid, 3857)
def test_argument_validation(self):
with self.assertRaisesMessage(
ValueError, "SRID is required for all geometries."
):
City.objects.annotate(geo=functions.GeoFunc(Point(1, 1)))
msg = "GeoFunc function requires a GeometryField in position 1, got CharField."
with self.assertRaisesMessage(TypeError, msg):
City.objects.annotate(geo=functions.GeoFunc("name"))
msg = "GeoFunc function requires a geometric argument in position 1."
with self.assertRaisesMessage(TypeError, msg):
City.objects.annotate(union=functions.GeoFunc(1, "point")).get(
name="Dallas"
)
|
cb927cf2c54fdef3d23c1362f944426c8936d401a85a0370a3acb25631a0189a | import tempfile
from io import StringIO
from django.contrib.gis import gdal
from django.contrib.gis.db.models import Extent, MakeLine, Union, functions
from django.contrib.gis.geos import (
GeometryCollection,
GEOSGeometry,
LinearRing,
LineString,
MultiLineString,
MultiPoint,
MultiPolygon,
Point,
Polygon,
fromstr,
)
from django.core.management import call_command
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import F, OuterRef, Subquery
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from ..utils import skipUnlessGISLookup
from .models import (
City,
Country,
Feature,
MinusOneSRID,
MultiFields,
NonConcreteModel,
PennsylvaniaCity,
State,
Track,
)
class GeoModelTest(TestCase):
fixtures = ["initial"]
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
# Testing on a Point
pnt = Point(0, 0)
nullcity = City(name="NullCity", point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
with self.assertRaisesMessage(TypeError, "Cannot set"):
nullcity.point = bad
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name="NullCity").point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(
Point(23, 5, srid=4326), City.objects.get(name="NullCity").point
)
nullcity.save()
self.assertEqual(
Point(23, 5, srid=4326), City.objects.get(name="NullCity").point
)
nullcity.delete()
# Testing on a Polygon
shell = LinearRing((0, 0), (0, 90), (100, 90), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name="NullState", poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name="NullState")
self.assertEqual(connection.ops.Adapter._fix_polygon(ply), ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
self.assertEqual("WGS 84", ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(
connection.ops.Adapter._fix_polygon(ply),
State.objects.get(name="NullState").poly,
)
ns.delete()
@skipUnlessDBFeature("supports_transform")
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = "POINT (-98.493183 29.424170)"
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# San Antonio in 'WGS 84 / Pseudo-Mercator' (SRID 3857)
other_srid_pnt = wgs_pnt.transform(3857, clone=True)
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
if connection.ops.oracle:
tx = Country.objects.get(mpoly__contains=other_srid_pnt)
else:
tx = Country.objects.get(mpoly__intersects=other_srid_pnt)
self.assertEqual("Texas", tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name="San Antonio", point=other_srid_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name="San Antonio")
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertIsNone(c.point)
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name="Point", geom=Point(1, 1)).save()
Feature(name="LineString", geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(
name="Polygon",
geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))),
).save()
Feature(
name="GeometryCollection",
geom=GeometryCollection(
Point(2, 2),
LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))),
),
).save()
f_1 = Feature.objects.get(name="Point")
self.assertIsInstance(f_1.geom, Point)
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name="LineString")
self.assertIsInstance(f_2.geom, LineString)
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name="Polygon")
self.assertIsInstance(f_3.geom, Polygon)
f_4 = Feature.objects.get(name="GeometryCollection")
self.assertIsInstance(f_4.geom, GeometryCollection)
self.assertEqual(f_3.geom, f_4.geom[2])
@skipUnlessDBFeature("supports_transform")
def test_inherited_geofields(self):
"Database functions on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(
name="Mansfield", county="Tioga", point="POINT(-77.071445 41.823881)"
)
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.annotate(
new_point=functions.Transform("point", srid=32128)
)
self.assertEqual(1, qs.count())
for pc in qs:
self.assertEqual(32128, pc.new_point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
point_select = connection.ops.select % "point"
cities2 = list(
City.objects.raw(
"select id, name, %s as point from geoapp_city" % point_select
)
)
self.assertEqual(len(cities1), len(cities2))
with self.assertNumQueries(0): # Ensure point isn't deferred.
self.assertIsInstance(cities2[0].point, Point)
def test_gis_query_as_string(self):
"""GIS queries can be represented as strings."""
query = City.objects.filter(point__within=Polygon.from_bbox((0, 0, 2, 2)))
self.assertIn(
connection.ops.quote_name(City._meta.db_table),
str(query.query),
)
def test_dumpdata_loaddata_cycle(self):
"""
Test a dumpdata/loaddata cycle with geographic data.
"""
out = StringIO()
original_data = list(City.objects.order_by("name"))
call_command("dumpdata", "geoapp.City", stdout=out)
result = out.getvalue()
houston = City.objects.get(name="Houston")
self.assertIn('"point": "%s"' % houston.point.ewkt, result)
# Reload now dumped data
with tempfile.NamedTemporaryFile(mode="w", suffix=".json") as tmp:
tmp.write(result)
tmp.seek(0)
call_command("loaddata", tmp.name, verbosity=0)
self.assertEqual(original_data, list(City.objects.order_by("name")))
@skipUnlessDBFeature("supports_empty_geometries")
def test_empty_geometries(self):
geometry_classes = [
Point,
LineString,
LinearRing,
Polygon,
MultiPoint,
MultiLineString,
MultiPolygon,
GeometryCollection,
]
for klass in geometry_classes:
g = klass(srid=4326)
feature = Feature.objects.create(name="Empty %s" % klass.__name__, geom=g)
feature.refresh_from_db()
if klass is LinearRing:
                # LinearRing isn't representable in WKB, so GEOSGeometry.wkb
                # uses LineString instead.
g = LineString(srid=4326)
self.assertEqual(feature.geom, g)
self.assertEqual(feature.geom.srid, g.srid)
class GeoLookupTest(TestCase):
fixtures = ["initial"]
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name="Pueblo")
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual("Kansas", qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name="Texas")
        # Seeing what cities are in Texas -- should get Houston and Dallas,
        # plus Oklahoma City, because 'contained' only checks against the
        # _bounding box_ of the geometries.
if connection.features.supports_contained_lookup:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ["Houston", "Dallas", "Oklahoma City"]
for c in qs:
self.assertIn(c.name, cities)
# Pulling out some cities.
houston = City.objects.get(name="Houston")
wellington = City.objects.get(name="Wellington")
pueblo = City.objects.get(name="Pueblo")
okcity = City.objects.get(name="Oklahoma City")
lawrence = City.objects.get(name="Lawrence")
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(
mpoly__contains=wellington.point.hex
) # Query w/EWKBHEX
self.assertEqual("Texas", tx.name)
self.assertEqual("New Zealand", nz.name)
# Testing `contains` on the states using the point for Lawrence.
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual("Kansas", ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding
# box of Texas) are not contained in Texas or New Zealand.
self.assertEqual(
len(Country.objects.filter(mpoly__contains=pueblo.point)), 0
) # Query w/GEOSGeometry object
self.assertEqual(
len(Country.objects.filter(mpoly__contains=okcity.point.wkt)), 0
) # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if connection.features.supports_bbcontains_lookup:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual("Texas", qs[0].name)
@skipUnlessDBFeature("supports_crosses_lookup")
def test_crosses_lookup(self):
Track.objects.create(name="Line1", line=LineString([(-95, 29), (-60, 0)]))
self.assertEqual(
Track.objects.filter(
line__crosses=LineString([(-95, 0), (-60, 29)])
).count(),
1,
)
self.assertEqual(
Track.objects.filter(
line__crosses=LineString([(-95, 30), (0, 30)])
).count(),
0,
)
@skipUnlessDBFeature("supports_isvalid_lookup")
def test_isvalid_lookup(self):
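        # The ring revisits (1 1) and (1 0), retracing its own edge, so the
        # polygon self-intersects and is invalid.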
invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))")
State.objects.create(name="invalid", poly=invalid_geom)
qs = State.objects.all()
if connection.ops.oracle:
# Kansas has adjacent vertices with distance 6.99244813842e-12
# which is smaller than the default Oracle tolerance.
qs = qs.exclude(name="Kansas")
self.assertEqual(
State.objects.filter(name="Kansas", poly__isvalid=False).count(), 1
)
self.assertEqual(qs.filter(poly__isvalid=False).count(), 1)
self.assertEqual(qs.filter(poly__isvalid=True).count(), qs.count() - 1)
@skipUnlessGISLookup("left", "right")
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
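        # For example, Colorado spans roughly x in [-109.05, -102.04], so
        # Houston (x ~ -95.36) is strictly right of CO while Victoria
        # (x ~ -123.4) is strictly left of it.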
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name="Colorado").poly
ks_border = State.objects.get(name="Kansas").poly
# These cities should be strictly to the right of the CO border.
cities = [
"Houston",
"Dallas",
"Oklahoma City",
"Lawrence",
"Chicago",
"Wellington",
]
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# These cities should be strictly to the right of the KS border.
cities = ["Chicago", "Wellington"]
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual("Victoria", vic.name)
cities = ["Pueblo", "Victoria"]
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
@skipUnlessGISLookup("strictly_above", "strictly_below")
def test_strictly_above_below_lookups(self):
dallas = City.objects.get(name="Dallas")
self.assertQuerySetEqual(
City.objects.filter(point__strictly_above=dallas.point).order_by("name"),
["Chicago", "Lawrence", "Oklahoma City", "Pueblo", "Victoria"],
lambda b: b.name,
)
self.assertQuerySetEqual(
City.objects.filter(point__strictly_below=dallas.point).order_by("name"),
["Houston", "Wellington"],
lambda b: b.name,
)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr("POINT (-95.363151 29.763374)", srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]:
self.assertEqual("Houston", c.name)
@skipUnlessDBFeature("supports_null_geometries")
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name="Puerto Rico")
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual("Puerto Rico", nullqs[0].name)
# GeometryField=None is an alias for __isnull=True.
self.assertCountEqual(State.objects.filter(poly=None), nullqs)
self.assertCountEqual(State.objects.exclude(poly=None), validqs)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertIn("Colorado", state_names)
self.assertIn("Kansas", state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name="Northern Mariana Islands", poly=None)
self.assertIsNone(nmi.poly)
# Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = "POLYGON((0 0,1 0,1 1,1 0,0 0))"
nmi.save()
State.objects.filter(name="Northern Mariana Islands").update(poly=None)
self.assertIsNone(State.objects.get(name="Northern Mariana Islands").poly)
@skipUnlessDBFeature(
"supports_null_geometries", "supports_crosses_lookup", "supports_relate_lookup"
)
def test_null_geometries_excluded_in_lookups(self):
"""NULL features are excluded in spatial lookup functions."""
null = State.objects.create(name="NULL", poly=None)
queries = [
("equals", Point(1, 1)),
("disjoint", Point(1, 1)),
("touches", Point(1, 1)),
("crosses", LineString((0, 0), (1, 1), (5, 5))),
("within", Point(1, 1)),
("overlaps", LineString((0, 0), (1, 1), (5, 5))),
("contains", LineString((0, 0), (1, 1), (5, 5))),
("intersects", LineString((0, 0), (1, 1), (5, 5))),
("relate", (Point(1, 1), "T*T***FF*")),
("same_as", Point(1, 1)),
("exact", Point(1, 1)),
("coveredby", Point(1, 1)),
("covers", Point(1, 1)),
]
for lookup, geom in queries:
with self.subTest(lookup=lookup):
self.assertNotIn(
null, State.objects.filter(**{"poly__%s" % lookup: geom})
)
def test_wkt_string_in_lookup(self):
# Valid WKT strings don't emit error logs.
with self.assertNoLogs("django.contrib.gis", "ERROR"):
State.objects.filter(poly__intersects="LINESTRING(0 0, 1 1, 5 5)")
@skipUnlessDBFeature("supports_relate_lookup")
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr("POINT (649287.0363174 4177429.4494686)", srid=2847)
pnt2 = fromstr("POINT(-98.4919715741052 29.4333344025053)", srid=4326)
        # Not passing in a geometry as the first param raises a ValueError
        # when initializing the QuerySet.
with self.assertRaises(ValueError):
Country.objects.filter(mpoly__relate=(23, "foo"))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [
((pnt1, 0), ValueError),
((pnt2, "T*T***FF*", 0), ValueError),
]:
qs = Country.objects.filter(mpoly__relate=bad_args)
with self.assertRaises(e):
qs.count()
contains_mask = "T*T***FF*"
within_mask = "T*F**F***"
intersects_mask = "T********"
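        # The nine characters of a DE-9IM mask test, in order, the pairwise
        # intersections of the Interior/Boundary/Exterior of A against those
        # of B (II, IB, IE, BI, BB, BE, EI, EB, EE): T = intersection
        # required, F = intersection forbidden, * = don't care. E.g. for
        # `contains`, II and IE must intersect while EI and EB must not.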
# Relate works differently on Oracle.
if connection.ops.oracle:
contains_mask = "contains"
within_mask = "inside"
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = "overlapbdyintersect"
# Testing contains relation mask.
if connection.features.supports_transform:
self.assertEqual(
Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name,
"Texas",
)
self.assertEqual(
"Texas", Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name
)
# Testing within relation mask.
ks = State.objects.get(name="Kansas")
self.assertEqual(
"Lawrence", City.objects.get(point__relate=(ks.poly, within_mask)).name
)
# Testing intersection relation mask.
if not connection.ops.oracle:
if connection.features.supports_transform:
self.assertEqual(
Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name,
"Texas",
)
self.assertEqual(
"Texas", Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name
)
self.assertEqual(
"Lawrence",
City.objects.get(point__relate=(ks.poly, intersects_mask)).name,
)
# With a complex geometry expression
mask = "anyinteract" if connection.ops.oracle else within_mask
self.assertFalse(
City.objects.exclude(
point__relate=(functions.Union("point", "point"), mask)
)
)
def test_gis_lookups_with_complex_expressions(self):
multiple_arg_lookups = {
"dwithin",
"relate",
} # These lookups are tested elsewhere.
lookups = connection.ops.gis_operators.keys() - multiple_arg_lookups
self.assertTrue(lookups, "No lookups found")
for lookup in lookups:
with self.subTest(lookup):
City.objects.filter(
**{"point__" + lookup: functions.Union("point", "point")}
).exists()
def test_subquery_annotation(self):
multifields = MultiFields.objects.create(
city=City.objects.create(point=Point(1, 1)),
point=Point(2, 2),
poly=Polygon.from_bbox((0, 0, 2, 2)),
)
qs = MultiFields.objects.annotate(
city_point=Subquery(
City.objects.filter(
id=OuterRef("city"),
).values("point")
),
).filter(
city_point__within=F("poly"),
)
self.assertEqual(qs.get(), multifields)
class GeoQuerySetTest(TestCase):
    # TODO: GeoQuerySet is removed, organize these tests better.
fixtures = ["initial"]
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent(self):
"""
Testing the `Extent` aggregate.
"""
# Reference query:
# SELECT ST_extent(point)
# FROM geoapp_city
        # WHERE (name='Houston' or name='Dallas');
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (
-96.8016128540039,
29.7633724212646,
-95.3631439208984,
32.782058715820,
)
qs = City.objects.filter(name__in=("Houston", "Dallas"))
extent = qs.aggregate(Extent("point"))["point__extent"]
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
self.assertIsNone(
            City.objects.filter(name="Smalltown").aggregate(Extent("point"))[
"point__extent"
]
)
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent_with_limit(self):
"""
Testing if extent supports limit.
"""
extent1 = City.objects.aggregate(Extent("point"))["point__extent"]
extent2 = City.objects.all()[:3].aggregate(Extent("point"))["point__extent"]
self.assertNotEqual(extent1, extent2)
def test_make_line(self):
"""
Testing the `MakeLine` aggregate.
"""
if not connection.features.supports_make_line_aggr:
with self.assertRaises(NotSupportedError):
City.objects.aggregate(MakeLine("point"))
return
        # MakeLine on an inappropriate field simply returns None.
self.assertIsNone(State.objects.aggregate(MakeLine("poly"))["poly__makeline"])
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
line = City.objects.aggregate(MakeLine("point"))["point__makeline"]
ref_points = City.objects.values_list("point", flat=True)
self.assertIsInstance(line, LineString)
self.assertEqual(len(line), ref_points.count())
# Compare pairs of manually sorted points, as the default ordering is
# flaky.
        for point, ref_city in zip(sorted(line), sorted(ref_points)):
            point_x, point_y = point
            self.assertAlmostEqual(point_x, ref_city.x, 5)
            self.assertAlmostEqual(point_y, ref_city.y, 5)
@skipUnlessDBFeature("supports_union_aggr")
def test_unionagg(self):
"""
Testing the `Union` aggregate.
"""
tx = Country.objects.get(name="Texas").mpoly
# Houston, Dallas -- Ordering may differ depending on backend or GEOS version.
union = GEOSGeometry("MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)")
qs = City.objects.filter(point__within=tx)
with self.assertRaises(ValueError):
qs.aggregate(Union("name"))
        # Aggregating with and without an explicit ordering -- the ordering
        # should make no difference, since Union is an aggregate on a
        # spatial column.
u1 = qs.aggregate(Union("point"))["point__union"]
u2 = qs.order_by("name").aggregate(Union("point"))["point__union"]
self.assertTrue(union.equals(u1))
self.assertTrue(union.equals(u2))
qs = City.objects.filter(name="NotACity")
self.assertIsNone(qs.aggregate(Union("point"))["point__union"])
@skipUnlessDBFeature("supports_union_aggr")
def test_geoagg_subquery(self):
tx = Country.objects.get(name="Texas")
union = GEOSGeometry("MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)")
# Use distinct() to force the usage of a subquery for aggregation.
with CaptureQueriesContext(connection) as ctx:
self.assertIs(
union.equals(
City.objects.filter(point__within=tx.mpoly)
.distinct()
.aggregate(
Union("point"),
)["point__union"],
),
True,
)
self.assertIn("subquery", ctx.captured_queries[0]["sql"])
@skipUnlessDBFeature("supports_tolerance_parameter")
def test_unionagg_tolerance(self):
City.objects.create(
point=fromstr("POINT(-96.467222 32.751389)", srid=4326),
name="Forney",
)
tx = Country.objects.get(name="Texas").mpoly
        # The tolerance is greater than the distance between Forney and
        # Dallas, so Dallas is ignored.
forney_houston = GEOSGeometry(
"MULTIPOINT(-95.363151 29.763374, -96.467222 32.751389)",
srid=4326,
)
self.assertIs(
forney_houston.equals_exact(
City.objects.filter(point__within=tx).aggregate(
Union("point", tolerance=32000),
)["point__union"],
tolerance=10e-6,
),
True,
)
@skipUnlessDBFeature("supports_tolerance_parameter")
def test_unionagg_tolerance_escaping(self):
tx = Country.objects.get(name="Texas").mpoly
with self.assertRaises(DatabaseError):
City.objects.filter(point__within=tx).aggregate(
Union("point", tolerance="0.05))), (((1"),
)
def test_within_subquery(self):
"""
Using a queryset inside a geo lookup is working (using a subquery)
(#14483).
"""
tex_cities = City.objects.filter(
point__within=Country.objects.filter(name="Texas").values("mpoly")
).order_by("name")
self.assertEqual(
list(tex_cities.values_list("name", flat=True)), ["Dallas", "Houston"]
)
def test_non_concrete_field(self):
NonConcreteModel.objects.create(point=Point(0, 0), name="name")
list(NonConcreteModel.objects.all())
def test_values_srid(self):
for c, v in zip(City.objects.all(), City.objects.values()):
self.assertEqual(c.point.srid, v["point"].srid)
a848ccef80f8887d294a4e09bcf1b657f5b814b56eed162d06669a56d147ce23 | import unittest
from datetime import datetime, timedelta
from datetime import timezone as datetime_timezone
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
try:
import pytz
except ImportError:
pytz = None
from django.conf import settings
from django.db import DataError, OperationalError
from django.db.models import (
DateField,
DateTimeField,
F,
IntegerField,
Max,
OuterRef,
Subquery,
TimeField,
)
from django.db.models.functions import (
Extract,
ExtractDay,
ExtractHour,
ExtractIsoWeekDay,
ExtractIsoYear,
ExtractMinute,
ExtractMonth,
ExtractQuarter,
ExtractSecond,
ExtractWeek,
ExtractWeekDay,
ExtractYear,
Trunc,
TruncDate,
TruncDay,
TruncHour,
TruncMinute,
TruncMonth,
TruncQuarter,
TruncSecond,
TruncTime,
TruncWeek,
TruncYear,
)
from django.test import (
TestCase,
ignore_warnings,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning
from ..models import Author, DTModel, Fan
HAS_PYTZ = pytz is not None
if not HAS_PYTZ:
needs_pytz = unittest.skip("Test requires pytz")
else:
def needs_pytz(f):
return f
ZONE_CONSTRUCTORS = (zoneinfo.ZoneInfo,)
if HAS_PYTZ:
ZONE_CONSTRUCTORS += (pytz.timezone,)
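# Zone constructors to exercise in timezone-aware tests: zoneinfo.ZoneInfo
# always, plus pytz.timezone when pytz is installed.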
def truncate_to(value, kind, tzinfo=None):
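    """
    Python-side mirror of the database's Trunc behavior: truncate `value`
    (a date or datetime) to the given `kind`, optionally converting it to
    `tzinfo` first and re-localizing the result afterwards. For example,
    truncate_to(datetime(2015, 6, 15, 14, 30), "quarter") returns
    datetime(2015, 4, 1, 0, 0).
    """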
# Convert to target timezone before truncation
if tzinfo is not None:
value = value.astimezone(tzinfo)
def truncate(value, kind):
if kind == "second":
return value.replace(microsecond=0)
if kind == "minute":
return value.replace(second=0, microsecond=0)
if kind == "hour":
return value.replace(minute=0, second=0, microsecond=0)
if kind == "day":
if isinstance(value, datetime):
return value.replace(hour=0, minute=0, second=0, microsecond=0)
return value
if kind == "week":
if isinstance(value, datetime):
return (value - timedelta(days=value.weekday())).replace(
hour=0, minute=0, second=0, microsecond=0
)
return value - timedelta(days=value.weekday())
if kind == "month":
if isinstance(value, datetime):
return value.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
return value.replace(day=1)
if kind == "quarter":
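            # First month of the quarter, e.g. August (8) -> 8 - (8 - 1) % 3 = 7 (July).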
month_in_quarter = value.month - (value.month - 1) % 3
if isinstance(value, datetime):
return value.replace(
month=month_in_quarter,
day=1,
hour=0,
minute=0,
second=0,
microsecond=0,
)
return value.replace(month=month_in_quarter, day=1)
# otherwise, truncate to year
if isinstance(value, datetime):
return value.replace(
month=1, day=1, hour=0, minute=0, second=0, microsecond=0
)
return value.replace(month=1, day=1)
value = truncate(value, kind)
if tzinfo is not None:
# If there was a daylight saving transition, then reset the timezone.
value = timezone.make_aware(value.replace(tzinfo=None), tzinfo)
return value
@override_settings(USE_TZ=False)
class DateFunctionTests(TestCase):
def create_model(self, start_datetime, end_datetime):
return DTModel.objects.create(
name=start_datetime.isoformat() if start_datetime else "None",
start_datetime=start_datetime,
end_datetime=end_datetime,
start_date=start_datetime.date() if start_datetime else None,
end_date=end_datetime.date() if end_datetime else None,
start_time=start_datetime.time() if start_datetime else None,
end_time=end_datetime.time() if end_datetime else None,
duration=(end_datetime - start_datetime)
if start_datetime and end_datetime
else None,
)
def test_extract_year_exact_lookup(self):
"""
        Extract year uses a BETWEEN filter to compare the year, allowing
        indexes to be used.
"""
start_datetime = datetime(2015, 6, 15, 14, 10)
end_datetime = datetime(2016, 6, 15, 14, 10)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
for lookup in ("year", "iso_year"):
with self.subTest(lookup):
qs = DTModel.objects.filter(
**{"start_datetime__%s__exact" % lookup: 2015}
)
self.assertEqual(qs.count(), 1)
query_string = str(qs.query).lower()
self.assertEqual(query_string.count(" between "), 1)
self.assertEqual(query_string.count("extract"), 0)
# exact is implied and should be the same
qs = DTModel.objects.filter(**{"start_datetime__%s" % lookup: 2015})
self.assertEqual(qs.count(), 1)
query_string = str(qs.query).lower()
self.assertEqual(query_string.count(" between "), 1)
self.assertEqual(query_string.count("extract"), 0)
# date and datetime fields should behave the same
qs = DTModel.objects.filter(**{"start_date__%s" % lookup: 2015})
self.assertEqual(qs.count(), 1)
query_string = str(qs.query).lower()
self.assertEqual(query_string.count(" between "), 1)
self.assertEqual(query_string.count("extract"), 0)
# an expression rhs cannot use the between optimization.
qs = DTModel.objects.annotate(
start_year=ExtractYear("start_datetime"),
).filter(end_datetime__year=F("start_year") + 1)
self.assertEqual(qs.count(), 1)
query_string = str(qs.query).lower()
self.assertEqual(query_string.count(" between "), 0)
self.assertEqual(query_string.count("extract"), 3)
def test_extract_year_greaterthan_lookup(self):
start_datetime = datetime(2015, 6, 15, 14, 10)
end_datetime = datetime(2016, 6, 15, 14, 10)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
for lookup in ("year", "iso_year"):
with self.subTest(lookup):
qs = DTModel.objects.filter(**{"start_datetime__%s__gt" % lookup: 2015})
self.assertEqual(qs.count(), 1)
self.assertEqual(str(qs.query).lower().count("extract"), 0)
qs = DTModel.objects.filter(
**{"start_datetime__%s__gte" % lookup: 2015}
)
self.assertEqual(qs.count(), 2)
self.assertEqual(str(qs.query).lower().count("extract"), 0)
qs = DTModel.objects.annotate(
start_year=ExtractYear("start_datetime"),
).filter(**{"end_datetime__%s__gte" % lookup: F("start_year")})
self.assertEqual(qs.count(), 1)
self.assertGreaterEqual(str(qs.query).lower().count("extract"), 2)
def test_extract_year_lessthan_lookup(self):
start_datetime = datetime(2015, 6, 15, 14, 10)
end_datetime = datetime(2016, 6, 15, 14, 10)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
for lookup in ("year", "iso_year"):
with self.subTest(lookup):
qs = DTModel.objects.filter(**{"start_datetime__%s__lt" % lookup: 2016})
self.assertEqual(qs.count(), 1)
                self.assertEqual(str(qs.query).lower().count("extract"), 0)
qs = DTModel.objects.filter(
**{"start_datetime__%s__lte" % lookup: 2016}
)
self.assertEqual(qs.count(), 2)
                self.assertEqual(str(qs.query).lower().count("extract"), 0)
qs = DTModel.objects.annotate(
end_year=ExtractYear("end_datetime"),
).filter(**{"start_datetime__%s__lte" % lookup: F("end_year")})
self.assertEqual(qs.count(), 1)
self.assertGreaterEqual(str(qs.query).lower().count("extract"), 2)
def test_extract_lookup_name_sql_injection(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
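        # Either Django rejects the bogus lookup name up front (ValueError) or
        # the backend rejects the mangled SQL (OperationalError).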
with self.assertRaises((OperationalError, ValueError)):
DTModel.objects.filter(
start_datetime__year=Extract(
"start_datetime", "day' FROM start_datetime)) OR 1=1;--"
)
).exists()
def test_extract_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
with self.assertRaisesMessage(ValueError, "lookup_name must be provided"):
Extract("start_datetime")
msg = (
"Extract input expression must be DateField, DateTimeField, TimeField, or "
"DurationField."
)
with self.assertRaisesMessage(ValueError, msg):
list(DTModel.objects.annotate(extracted=Extract("name", "hour")))
with self.assertRaisesMessage(
ValueError,
"Cannot extract time component 'second' from DateField 'start_date'.",
):
list(DTModel.objects.annotate(extracted=Extract("start_date", "second")))
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "year")
).order_by("start_datetime"),
[(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "quarter")
).order_by("start_datetime"),
[(start_datetime, 2), (end_datetime, 2)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "month")
).order_by("start_datetime"),
[
(start_datetime, start_datetime.month),
(end_datetime, end_datetime.month),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "day")
).order_by("start_datetime"),
[(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "week")
).order_by("start_datetime"),
[(start_datetime, 25), (end_datetime, 24)],
lambda m: (m.start_datetime, m.extracted),
)
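        # Django's week_day lookup runs from 1 (Sunday) to 7 (Saturday), hence
        # the isoweekday() % 7 + 1 conversion from ISO's 1 (Monday)-7 (Sunday).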
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "week_day")
).order_by("start_datetime"),
[
(start_datetime, (start_datetime.isoweekday() % 7) + 1),
(end_datetime, (end_datetime.isoweekday() % 7) + 1),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "iso_week_day"),
).order_by("start_datetime"),
[
(start_datetime, start_datetime.isoweekday()),
(end_datetime, end_datetime.isoweekday()),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "hour")
).order_by("start_datetime"),
[(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "minute")
).order_by("start_datetime"),
[
(start_datetime, start_datetime.minute),
(end_datetime, end_datetime.minute),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=Extract("start_datetime", "second")
).order_by("start_datetime"),
[
(start_datetime, start_datetime.second),
(end_datetime, end_datetime.second),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__year=Extract("start_datetime", "year")
).count(),
2,
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__hour=Extract("start_datetime", "hour")
).count(),
2,
)
self.assertEqual(
DTModel.objects.filter(
start_date__month=Extract("start_date", "month")
).count(),
2,
)
self.assertEqual(
DTModel.objects.filter(
start_time__hour=Extract("start_time", "hour")
).count(),
2,
)
def test_extract_none(self):
self.create_model(None, None)
for t in (
Extract("start_datetime", "year"),
Extract("start_date", "year"),
Extract("start_time", "hour"),
):
with self.subTest(t):
self.assertIsNone(
DTModel.objects.annotate(extracted=t).first().extracted
)
def test_extract_outerref_validation(self):
inner_qs = DTModel.objects.filter(name=ExtractMonth(OuterRef("name")))
msg = (
"Extract input expression must be DateField, DateTimeField, "
"TimeField, or DurationField."
)
with self.assertRaisesMessage(ValueError, msg):
DTModel.objects.annotate(related_name=Subquery(inner_qs.values("name")[:1]))
@skipUnlessDBFeature("has_native_duration_field")
def test_extract_duration(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
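        # timedelta normalizes to (days, seconds, microseconds), so
        # `.seconds % 60` below yields the duration's seconds-within-the-minute
        # component.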
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=Extract("duration", "second")).order_by(
"start_datetime"
),
[
(start_datetime, (end_datetime - start_datetime).seconds % 60),
(end_datetime, (start_datetime - end_datetime).seconds % 60),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.annotate(
duration_days=Extract("duration", "day"),
)
.filter(duration_days__gt=200)
.count(),
1,
)
@skipIfDBFeature("has_native_duration_field")
def test_extract_duration_without_native_duration_field(self):
msg = "Extract requires native DurationField database support."
with self.assertRaisesMessage(ValueError, msg):
list(DTModel.objects.annotate(extracted=Extract("duration", "second")))
def test_extract_duration_unsupported_lookups(self):
msg = "Cannot extract component '%s' from DurationField 'duration'."
for lookup in (
"year",
"iso_year",
"month",
"week",
"week_day",
"iso_week_day",
"quarter",
):
with self.subTest(lookup):
with self.assertRaisesMessage(ValueError, msg % lookup):
DTModel.objects.annotate(extracted=Extract("duration", lookup))
def test_extract_year_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractYear("start_datetime")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractYear("start_date")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__year=ExtractYear("start_datetime")
).count(),
2,
)
def test_extract_iso_year_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractIsoYear("start_datetime")
).order_by("start_datetime"),
[(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractIsoYear("start_date")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)],
lambda m: (m.start_datetime, m.extracted),
)
# Both dates are from the same week year.
self.assertEqual(
DTModel.objects.filter(
start_datetime__iso_year=ExtractIsoYear("start_datetime")
).count(),
2,
)
def test_extract_iso_year_func_boundaries(self):
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
end_datetime = timezone.make_aware(end_datetime)
week_52_day_2014 = datetime(2014, 12, 27, 13, 0) # Sunday
week_1_day_2014_2015 = datetime(2014, 12, 31, 13, 0) # Wednesday
week_53_day_2015 = datetime(2015, 12, 31, 13, 0) # Thursday
if settings.USE_TZ:
week_1_day_2014_2015 = timezone.make_aware(week_1_day_2014_2015)
week_52_day_2014 = timezone.make_aware(week_52_day_2014)
week_53_day_2015 = timezone.make_aware(week_53_day_2015)
days = [week_52_day_2014, week_1_day_2014_2015, week_53_day_2015]
obj_1_iso_2014 = self.create_model(week_52_day_2014, end_datetime)
obj_1_iso_2015 = self.create_model(week_1_day_2014_2015, end_datetime)
obj_2_iso_2015 = self.create_model(week_53_day_2015, end_datetime)
qs = (
DTModel.objects.filter(start_datetime__in=days)
.annotate(
extracted=ExtractIsoYear("start_datetime"),
)
.order_by("start_datetime")
)
self.assertQuerySetEqual(
qs,
[
(week_52_day_2014, 2014),
(week_1_day_2014_2015, 2015),
(week_53_day_2015, 2015),
],
lambda m: (m.start_datetime, m.extracted),
)
qs = DTModel.objects.filter(
start_datetime__iso_year=2015,
).order_by("start_datetime")
self.assertSequenceEqual(qs, [obj_1_iso_2015, obj_2_iso_2015])
qs = DTModel.objects.filter(
start_datetime__iso_year__gt=2014,
).order_by("start_datetime")
self.assertSequenceEqual(qs, [obj_1_iso_2015, obj_2_iso_2015])
qs = DTModel.objects.filter(
start_datetime__iso_year__lte=2014,
).order_by("start_datetime")
self.assertSequenceEqual(qs, [obj_1_iso_2014])
def test_extract_month_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractMonth("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, start_datetime.month),
(end_datetime, end_datetime.month),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractMonth("start_date")).order_by(
"start_datetime"
),
[
(start_datetime, start_datetime.month),
(end_datetime, end_datetime.month),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__month=ExtractMonth("start_datetime")
).count(),
2,
)
def test_extract_day_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractDay("start_datetime")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractDay("start_date")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__day=ExtractDay("start_datetime")
).count(),
2,
)
def test_extract_week_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractWeek("start_datetime")).order_by(
"start_datetime"
),
[(start_datetime, 25), (end_datetime, 24)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractWeek("start_date")).order_by(
"start_datetime"
),
[(start_datetime, 25), (end_datetime, 24)],
lambda m: (m.start_datetime, m.extracted),
)
# both dates are from the same week.
self.assertEqual(
DTModel.objects.filter(
start_datetime__week=ExtractWeek("start_datetime")
).count(),
2,
)
def test_extract_quarter_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 8, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractQuarter("start_datetime")
).order_by("start_datetime"),
[(start_datetime, 2), (end_datetime, 3)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractQuarter("start_date")).order_by(
"start_datetime"
),
[(start_datetime, 2), (end_datetime, 3)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__quarter=ExtractQuarter("start_datetime")
).count(),
2,
)
def test_extract_quarter_func_boundaries(self):
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
end_datetime = timezone.make_aware(end_datetime)
last_quarter_2014 = datetime(2014, 12, 31, 13, 0)
first_quarter_2015 = datetime(2015, 1, 1, 13, 0)
if settings.USE_TZ:
last_quarter_2014 = timezone.make_aware(last_quarter_2014)
first_quarter_2015 = timezone.make_aware(first_quarter_2015)
dates = [last_quarter_2014, first_quarter_2015]
self.create_model(last_quarter_2014, end_datetime)
self.create_model(first_quarter_2015, end_datetime)
qs = (
DTModel.objects.filter(start_datetime__in=dates)
.annotate(
extracted=ExtractQuarter("start_datetime"),
)
.order_by("start_datetime")
)
self.assertQuerySetEqual(
qs,
[
(last_quarter_2014, 4),
(first_quarter_2015, 1),
],
lambda m: (m.start_datetime, m.extracted),
)
def test_extract_week_func_boundaries(self):
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
end_datetime = timezone.make_aware(end_datetime)
week_52_day_2014 = datetime(2014, 12, 27, 13, 0) # Sunday
week_1_day_2014_2015 = datetime(2014, 12, 31, 13, 0) # Wednesday
week_53_day_2015 = datetime(2015, 12, 31, 13, 0) # Thursday
if settings.USE_TZ:
week_1_day_2014_2015 = timezone.make_aware(week_1_day_2014_2015)
week_52_day_2014 = timezone.make_aware(week_52_day_2014)
week_53_day_2015 = timezone.make_aware(week_53_day_2015)
days = [week_52_day_2014, week_1_day_2014_2015, week_53_day_2015]
self.create_model(week_53_day_2015, end_datetime)
self.create_model(week_52_day_2014, end_datetime)
self.create_model(week_1_day_2014_2015, end_datetime)
qs = (
DTModel.objects.filter(start_datetime__in=days)
.annotate(
extracted=ExtractWeek("start_datetime"),
)
.order_by("start_datetime")
)
self.assertQuerySetEqual(
qs,
[
(week_52_day_2014, 52),
(week_1_day_2014_2015, 1),
(week_53_day_2015, 53),
],
lambda m: (m.start_datetime, m.extracted),
)
def test_extract_weekday_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractWeekDay("start_datetime")
).order_by("start_datetime"),
[
(start_datetime, (start_datetime.isoweekday() % 7) + 1),
(end_datetime, (end_datetime.isoweekday() % 7) + 1),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractWeekDay("start_date")).order_by(
"start_datetime"
),
[
(start_datetime, (start_datetime.isoweekday() % 7) + 1),
(end_datetime, (end_datetime.isoweekday() % 7) + 1),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__week_day=ExtractWeekDay("start_datetime")
).count(),
2,
)
def test_extract_iso_weekday_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractIsoWeekDay("start_datetime"),
).order_by("start_datetime"),
[
(start_datetime, start_datetime.isoweekday()),
(end_datetime, end_datetime.isoweekday()),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractIsoWeekDay("start_date"),
).order_by("start_datetime"),
[
(start_datetime, start_datetime.isoweekday()),
(end_datetime, end_datetime.isoweekday()),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
                start_datetime__iso_week_day=ExtractIsoWeekDay("start_datetime"),
).count(),
2,
)
def test_extract_hour_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractHour("start_datetime")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractHour("start_time")).order_by(
"start_datetime"
),
[(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__hour=ExtractHour("start_datetime")
).count(),
2,
)
def test_extract_minute_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractMinute("start_datetime")
).order_by("start_datetime"),
[
(start_datetime, start_datetime.minute),
(end_datetime, end_datetime.minute),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractMinute("start_time")).order_by(
"start_datetime"
),
[
(start_datetime, start_datetime.minute),
(end_datetime, end_datetime.minute),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__minute=ExtractMinute("start_datetime")
).count(),
2,
)
def test_extract_second_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
extracted=ExtractSecond("start_datetime")
).order_by("start_datetime"),
[
(start_datetime, start_datetime.second),
(end_datetime, end_datetime.second),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=ExtractSecond("start_time")).order_by(
"start_datetime"
),
[
(start_datetime, start_datetime.second),
(end_datetime, end_datetime.second),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__second=ExtractSecond("start_datetime")
).count(),
2,
)
def test_extract_second_func_no_fractional(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 30, 50, 783)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
obj = self.create_model(start_datetime, end_datetime)
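        # F() accepts transforms here, so the second component of both
        # datetime fields is extracted on the database side of the comparison.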
self.assertSequenceEqual(
DTModel.objects.filter(start_datetime__second=F("end_datetime__second")),
[obj],
)
self.assertSequenceEqual(
DTModel.objects.filter(start_time__second=F("end_time__second")),
[obj],
)
def test_trunc_lookup_name_sql_injection(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
# Database backends raise an exception or don't return any results.
try:
exists = DTModel.objects.filter(
start_datetime__date=Trunc(
"start_datetime",
"year', start_datetime)) OR 1=1;--",
)
).exists()
except (DataError, OperationalError):
pass
else:
self.assertIs(exists, False)
def test_trunc_func(self):
start_datetime = datetime(999, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
def test_datetime_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc(
"start_datetime", kind, output_field=DateTimeField()
)
).order_by("start_datetime"),
[
(start_datetime, truncate_to(start_datetime, kind)),
(end_datetime, truncate_to(end_datetime, kind)),
],
lambda m: (m.start_datetime, m.truncated),
)
def test_date_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc("start_date", kind, output_field=DateField())
).order_by("start_datetime"),
[
(start_datetime, truncate_to(start_datetime.date(), kind)),
(end_datetime, truncate_to(end_datetime.date(), kind)),
],
lambda m: (m.start_datetime, m.truncated),
)
def test_time_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc("start_time", kind, output_field=TimeField())
).order_by("start_datetime"),
[
(start_datetime, truncate_to(start_datetime.time(), kind)),
(end_datetime, truncate_to(end_datetime.time(), kind)),
],
lambda m: (m.start_datetime, m.truncated),
)
def test_datetime_to_time_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc("start_datetime", kind, output_field=TimeField()),
).order_by("start_datetime"),
[
(start_datetime, truncate_to(start_datetime.time(), kind)),
(end_datetime, truncate_to(end_datetime.time(), kind)),
],
lambda m: (m.start_datetime, m.truncated),
)
test_date_kind("year")
test_date_kind("quarter")
test_date_kind("month")
test_date_kind("day")
test_time_kind("hour")
test_time_kind("minute")
test_time_kind("second")
test_datetime_kind("year")
test_datetime_kind("quarter")
test_datetime_kind("month")
test_datetime_kind("day")
test_datetime_kind("hour")
test_datetime_kind("minute")
test_datetime_kind("second")
test_datetime_to_time_kind("hour")
test_datetime_to_time_kind("minute")
test_datetime_to_time_kind("second")
qs = DTModel.objects.filter(
start_datetime__date=Trunc(
"start_datetime", "day", output_field=DateField()
)
)
self.assertEqual(qs.count(), 2)
def _test_trunc_week(self, start_datetime, end_datetime):
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc("start_datetime", "week", output_field=DateTimeField())
).order_by("start_datetime"),
[
(start_datetime, truncate_to(start_datetime, "week")),
(end_datetime, truncate_to(end_datetime, "week")),
],
lambda m: (m.start_datetime, m.truncated),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc("start_date", "week", output_field=DateField())
).order_by("start_datetime"),
[
(start_datetime, truncate_to(start_datetime.date(), "week")),
(end_datetime, truncate_to(end_datetime.date(), "week")),
],
lambda m: (m.start_datetime, m.truncated),
)
def test_trunc_week(self):
self._test_trunc_week(
start_datetime=datetime(2015, 6, 15, 14, 30, 50, 321),
end_datetime=datetime(2016, 6, 15, 14, 10, 50, 123),
)
def test_trunc_week_before_1000(self):
self._test_trunc_week(
start_datetime=datetime(999, 6, 15, 14, 30, 50, 321),
end_datetime=datetime(2016, 6, 15, 14, 10, 50, 123),
)
def test_trunc_invalid_arguments(self):
msg = "output_field must be either DateField, TimeField, or DateTimeField"
with self.assertRaisesMessage(ValueError, msg):
list(
DTModel.objects.annotate(
truncated=Trunc(
"start_datetime", "year", output_field=IntegerField()
),
)
)
msg = "'name' isn't a DateField, TimeField, or DateTimeField."
with self.assertRaisesMessage(TypeError, msg):
list(
DTModel.objects.annotate(
truncated=Trunc("name", "year", output_field=DateTimeField()),
)
)
msg = "Cannot truncate DateField 'start_date' to DateTimeField"
with self.assertRaisesMessage(ValueError, msg):
list(DTModel.objects.annotate(truncated=Trunc("start_date", "second")))
with self.assertRaisesMessage(ValueError, msg):
list(
DTModel.objects.annotate(
truncated=Trunc(
"start_date", "month", output_field=DateTimeField()
),
)
)
msg = "Cannot truncate TimeField 'start_time' to DateTimeField"
with self.assertRaisesMessage(ValueError, msg):
list(DTModel.objects.annotate(truncated=Trunc("start_time", "month")))
with self.assertRaisesMessage(ValueError, msg):
list(
DTModel.objects.annotate(
truncated=Trunc(
"start_time", "second", output_field=DateTimeField()
),
)
)
def test_trunc_none(self):
self.create_model(None, None)
for t in (
Trunc("start_datetime", "year"),
Trunc("start_date", "year"),
Trunc("start_time", "hour"),
):
with self.subTest(t):
self.assertIsNone(
DTModel.objects.annotate(truncated=t).first().truncated
)
def test_trunc_year_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "year")
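        # end_datetime is pre-truncated to the year so that exactly one of the
        # two rows created below satisfies start_datetime == TruncYear(...).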
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncYear("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "year")),
(end_datetime, truncate_to(end_datetime, "year")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncYear("start_date")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime.date(), "year")),
(end_datetime, truncate_to(end_datetime.date(), "year")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(start_datetime=TruncYear("start_datetime")).count(),
1,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncYear("start_time")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncYear("start_time", output_field=TimeField())
)
)
def test_trunc_quarter_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 10, 15, 14, 10, 50, 123), "quarter")
last_quarter_2015 = truncate_to(
datetime(2015, 12, 31, 14, 10, 50, 123), "quarter"
)
first_quarter_2016 = truncate_to(
datetime(2016, 1, 1, 14, 10, 50, 123), "quarter"
)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
last_quarter_2015 = timezone.make_aware(last_quarter_2015)
first_quarter_2016 = timezone.make_aware(first_quarter_2016)
self.create_model(start_datetime=start_datetime, end_datetime=end_datetime)
self.create_model(start_datetime=end_datetime, end_datetime=start_datetime)
self.create_model(start_datetime=last_quarter_2015, end_datetime=end_datetime)
self.create_model(start_datetime=first_quarter_2016, end_datetime=end_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncQuarter("start_date")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime.date(), "quarter")),
(last_quarter_2015, truncate_to(last_quarter_2015.date(), "quarter")),
(first_quarter_2016, truncate_to(first_quarter_2016.date(), "quarter")),
(end_datetime, truncate_to(end_datetime.date(), "quarter")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncQuarter("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "quarter")),
(last_quarter_2015, truncate_to(last_quarter_2015, "quarter")),
(first_quarter_2016, truncate_to(first_quarter_2016, "quarter")),
(end_datetime, truncate_to(end_datetime, "quarter")),
],
lambda m: (m.start_datetime, m.extracted),
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncQuarter("start_time")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncQuarter("start_time", output_field=TimeField())
)
)
def test_trunc_month_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "month")
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncMonth("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "month")),
(end_datetime, truncate_to(end_datetime, "month")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncMonth("start_date")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime.date(), "month")),
(end_datetime, truncate_to(end_datetime.date(), "month")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(start_datetime=TruncMonth("start_datetime")).count(),
1,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncMonth("start_time")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncMonth("start_time", output_field=TimeField())
)
)
def test_trunc_week_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "week")
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncWeek("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "week")),
(end_datetime, truncate_to(end_datetime, "week")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(start_datetime=TruncWeek("start_datetime")).count(),
1,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncWeek("start_time")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncWeek("start_time", output_field=TimeField())
)
)
def test_trunc_date_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncDate("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, start_datetime.date()),
(end_datetime, end_datetime.date()),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__date=TruncDate("start_datetime")
).count(),
2,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateField"
):
list(DTModel.objects.annotate(truncated=TruncDate("start_time")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateField"
):
list(
DTModel.objects.annotate(
truncated=TruncDate("start_time", output_field=TimeField())
)
)
def test_trunc_date_none(self):
self.create_model(None, None)
self.assertIsNone(
DTModel.objects.annotate(truncated=TruncDate("start_datetime"))
.first()
.truncated
)
def test_trunc_time_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncTime("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, start_datetime.time()),
(end_datetime, end_datetime.time()),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime__time=TruncTime("start_datetime")
).count(),
2,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to TimeField"
):
list(DTModel.objects.annotate(truncated=TruncTime("start_date")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to TimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncTime("start_date", output_field=DateField())
)
)
def test_trunc_time_none(self):
self.create_model(None, None)
self.assertIsNone(
DTModel.objects.annotate(truncated=TruncTime("start_datetime"))
.first()
.truncated
)
def test_trunc_time_comparison(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 26) # 0 microseconds.
end_datetime = datetime(2015, 6, 15, 14, 30, 26, 321)
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.assertIs(
DTModel.objects.filter(
start_datetime__time=start_datetime.time(),
end_datetime__time=end_datetime.time(),
).exists(),
True,
)
self.assertIs(
DTModel.objects.annotate(
extracted_start=TruncTime("start_datetime"),
extracted_end=TruncTime("end_datetime"),
)
.filter(
extracted_start=start_datetime.time(),
extracted_end=end_datetime.time(),
)
.exists(),
True,
)
def test_trunc_day_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "day")
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncDay("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "day")),
(end_datetime, truncate_to(end_datetime, "day")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(start_datetime=TruncDay("start_datetime")).count(), 1
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncDay("start_time")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncDay("start_time", output_field=TimeField())
)
)
def test_trunc_hour_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "hour")
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncHour("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "hour")),
(end_datetime, truncate_to(end_datetime, "hour")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncHour("start_time")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime.time(), "hour")),
(end_datetime, truncate_to(end_datetime.time(), "hour")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(start_datetime=TruncHour("start_datetime")).count(),
1,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncHour("start_date")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncHour("start_date", output_field=DateField())
)
)
def test_trunc_minute_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "minute")
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncMinute("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "minute")),
(end_datetime, truncate_to(end_datetime, "minute")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncMinute("start_time")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime.time(), "minute")),
(end_datetime, truncate_to(end_datetime.time(), "minute")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime=TruncMinute("start_datetime")
).count(),
1,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncMinute("start_date")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncMinute("start_date", output_field=DateField())
)
)
def test_trunc_second_func(self):
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), "second")
if settings.USE_TZ:
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncSecond("start_datetime")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime, "second")),
(end_datetime, truncate_to(end_datetime, "second")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertQuerySetEqual(
DTModel.objects.annotate(extracted=TruncSecond("start_time")).order_by(
"start_datetime"
),
[
(start_datetime, truncate_to(start_datetime.time(), "second")),
(end_datetime, truncate_to(end_datetime.time(), "second")),
],
lambda m: (m.start_datetime, m.extracted),
)
self.assertEqual(
DTModel.objects.filter(
start_datetime=TruncSecond("start_datetime")
).count(),
1,
)
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"
):
list(DTModel.objects.annotate(truncated=TruncSecond("start_date")))
with self.assertRaisesMessage(
ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"
):
list(
DTModel.objects.annotate(
truncated=TruncSecond("start_date", output_field=DateField())
)
)
def test_trunc_subquery_with_parameters(self):
author_1 = Author.objects.create(name="J. R. R. Tolkien")
author_2 = Author.objects.create(name="G. R. R. Martin")
fan_since_1 = datetime(2016, 2, 3, 15, 0, 0)
fan_since_2 = datetime(2015, 2, 3, 15, 0, 0)
fan_since_3 = datetime(2017, 2, 3, 15, 0, 0)
if settings.USE_TZ:
fan_since_1 = timezone.make_aware(fan_since_1)
fan_since_2 = timezone.make_aware(fan_since_2)
fan_since_3 = timezone.make_aware(fan_since_3)
Fan.objects.create(author=author_1, name="Tom", fan_since=fan_since_1)
Fan.objects.create(author=author_1, name="Emma", fan_since=fan_since_2)
Fan.objects.create(author=author_2, name="Isabella", fan_since=fan_since_3)
inner = (
Fan.objects.filter(
author=OuterRef("pk"), name__in=("Emma", "Isabella", "Tom")
)
.values("author")
.annotate(newest_fan=Max("fan_since"))
.values("newest_fan")
)
outer = Author.objects.annotate(
newest_fan_year=TruncYear(Subquery(inner, output_field=DateTimeField()))
)
tz = datetime_timezone.utc if settings.USE_TZ else None
self.assertSequenceEqual(
outer.order_by("name").values("name", "newest_fan_year"),
[
{
"name": "G. R. R. Martin",
"newest_fan_year": datetime(2017, 1, 1, 0, 0, tzinfo=tz),
},
{
"name": "J. R. R. Tolkien",
"newest_fan_year": datetime(2016, 1, 1, 0, 0, tzinfo=tz),
},
],
)
def test_extract_outerref(self):
datetime_1 = datetime(2000, 1, 1)
datetime_2 = datetime(2001, 3, 5)
datetime_3 = datetime(2002, 1, 3)
if settings.USE_TZ:
datetime_1 = timezone.make_aware(datetime_1)
datetime_2 = timezone.make_aware(datetime_2)
datetime_3 = timezone.make_aware(datetime_3)
obj_1 = self.create_model(datetime_1, datetime_3)
obj_2 = self.create_model(datetime_2, datetime_1)
obj_3 = self.create_model(datetime_3, datetime_2)
inner_qs = DTModel.objects.filter(
start_datetime__year=2000,
start_datetime__month=ExtractMonth(OuterRef("end_datetime")),
)
qs = DTModel.objects.annotate(
related_pk=Subquery(inner_qs.values("pk")[:1]),
)
self.assertSequenceEqual(
qs.order_by("name").values("pk", "related_pk"),
[
{"pk": obj_1.pk, "related_pk": obj_1.pk},
{"pk": obj_2.pk, "related_pk": obj_1.pk},
{"pk": obj_3.pk, "related_pk": None},
],
)
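# A framework-free sketch (an assumption, not part of the original suite) of
# what the TruncYear(Subquery(...)) annotation in
# test_trunc_subquery_with_parameters above computes per author: take the
# newest fan_since and keep only the start of its year. The helper name is
# hypothetical.
def _newest_fan_year_sketch(fan_since_dates):
    newest = max(fan_since_dates)
    return newest.replace(
        month=1, day=1, hour=0, minute=0, second=0, microsecond=0
    )

assert _newest_fan_year_sketch(
    [datetime(2016, 2, 3, 15), datetime(2015, 2, 3, 15)]
) == datetime(2016, 1, 1)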
@override_settings(USE_TZ=True, TIME_ZONE="UTC")
class DateFunctionWithTimeZoneTests(DateFunctionTests):
def get_timezones(self, key):
for constructor in ZONE_CONSTRUCTORS:
yield constructor(key)
def test_extract_func_with_timezone(self):
start_datetime = datetime(2015, 6, 15, 23, 30, 1, 321)
end_datetime = datetime(2015, 6, 16, 13, 11, 27, 123)
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
delta_tzinfo_pos = datetime_timezone(timedelta(hours=5))
delta_tzinfo_neg = datetime_timezone(timedelta(hours=-5, minutes=17))
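        # 23:30 UTC on 2015-06-15 is 09:30 on the 16th in Melbourne (UTC+10
        # in June), which is why the *_melb values below shift by a day and
        # an hour relative to UTC.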
for melb in self.get_timezones("Australia/Melbourne"):
with self.subTest(repr(melb)):
qs = DTModel.objects.annotate(
day=Extract("start_datetime", "day"),
day_melb=Extract("start_datetime", "day", tzinfo=melb),
week=Extract("start_datetime", "week", tzinfo=melb),
isoyear=ExtractIsoYear("start_datetime", tzinfo=melb),
weekday=ExtractWeekDay("start_datetime"),
weekday_melb=ExtractWeekDay("start_datetime", tzinfo=melb),
isoweekday=ExtractIsoWeekDay("start_datetime"),
isoweekday_melb=ExtractIsoWeekDay("start_datetime", tzinfo=melb),
quarter=ExtractQuarter("start_datetime", tzinfo=melb),
hour=ExtractHour("start_datetime"),
hour_melb=ExtractHour("start_datetime", tzinfo=melb),
hour_with_delta_pos=ExtractHour(
"start_datetime", tzinfo=delta_tzinfo_pos
),
hour_with_delta_neg=ExtractHour(
"start_datetime", tzinfo=delta_tzinfo_neg
),
minute_with_delta_neg=ExtractMinute(
"start_datetime", tzinfo=delta_tzinfo_neg
),
).order_by("start_datetime")
utc_model = qs.get()
self.assertEqual(utc_model.day, 15)
self.assertEqual(utc_model.day_melb, 16)
self.assertEqual(utc_model.week, 25)
self.assertEqual(utc_model.isoyear, 2015)
self.assertEqual(utc_model.weekday, 2)
self.assertEqual(utc_model.weekday_melb, 3)
self.assertEqual(utc_model.isoweekday, 1)
self.assertEqual(utc_model.isoweekday_melb, 2)
self.assertEqual(utc_model.quarter, 2)
self.assertEqual(utc_model.hour, 23)
self.assertEqual(utc_model.hour_melb, 9)
self.assertEqual(utc_model.hour_with_delta_pos, 4)
self.assertEqual(utc_model.hour_with_delta_neg, 18)
self.assertEqual(utc_model.minute_with_delta_neg, 47)
with timezone.override(melb):
melb_model = qs.get()
self.assertEqual(melb_model.day, 16)
self.assertEqual(melb_model.day_melb, 16)
self.assertEqual(melb_model.week, 25)
self.assertEqual(melb_model.isoyear, 2015)
self.assertEqual(melb_model.weekday, 3)
self.assertEqual(melb_model.isoweekday, 2)
self.assertEqual(melb_model.quarter, 2)
self.assertEqual(melb_model.weekday_melb, 3)
self.assertEqual(melb_model.isoweekday_melb, 2)
self.assertEqual(melb_model.hour, 9)
self.assertEqual(melb_model.hour_melb, 9)
def test_extract_func_with_timezone_minus_no_offset(self):
start_datetime = datetime(2015, 6, 15, 23, 30, 1, 321)
end_datetime = datetime(2015, 6, 16, 13, 11, 27, 123)
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
for ust_nera in self.get_timezones("Asia/Ust-Nera"):
with self.subTest(repr(ust_nera)):
qs = DTModel.objects.annotate(
hour=ExtractHour("start_datetime"),
hour_tz=ExtractHour("start_datetime", tzinfo=ust_nera),
).order_by("start_datetime")
utc_model = qs.get()
self.assertEqual(utc_model.hour, 23)
self.assertEqual(utc_model.hour_tz, 9)
with timezone.override(ust_nera):
ust_nera_model = qs.get()
self.assertEqual(ust_nera_model.hour, 9)
self.assertEqual(ust_nera_model.hour_tz, 9)
def test_extract_func_explicit_timezone_priority(self):
start_datetime = datetime(2015, 6, 15, 23, 30, 1, 321)
end_datetime = datetime(2015, 6, 16, 13, 11, 27, 123)
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
for melb in self.get_timezones("Australia/Melbourne"):
with self.subTest(repr(melb)):
with timezone.override(melb):
model = (
DTModel.objects.annotate(
day_melb=Extract("start_datetime", "day"),
day_utc=Extract(
"start_datetime", "day", tzinfo=datetime_timezone.utc
),
)
.order_by("start_datetime")
.get()
)
self.assertEqual(model.day_melb, 16)
self.assertEqual(model.day_utc, 15)
def test_extract_invalid_field_with_timezone(self):
for melb in self.get_timezones("Australia/Melbourne"):
with self.subTest(repr(melb)):
msg = "tzinfo can only be used with DateTimeField."
with self.assertRaisesMessage(ValueError, msg):
DTModel.objects.annotate(
day_melb=Extract("start_date", "day", tzinfo=melb),
).get()
with self.assertRaisesMessage(ValueError, msg):
DTModel.objects.annotate(
hour_melb=Extract("start_time", "hour", tzinfo=melb),
).get()
def test_trunc_timezone_applied_before_truncation(self):
start_datetime = datetime(2016, 1, 1, 1, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
for melb, pacific in zip(
self.get_timezones("Australia/Melbourne"),
self.get_timezones("America/Los_Angeles"),
):
with self.subTest((repr(melb), repr(pacific))):
model = (
DTModel.objects.annotate(
melb_year=TruncYear("start_datetime", tzinfo=melb),
pacific_year=TruncYear("start_datetime", tzinfo=pacific),
melb_date=TruncDate("start_datetime", tzinfo=melb),
pacific_date=TruncDate("start_datetime", tzinfo=pacific),
melb_time=TruncTime("start_datetime", tzinfo=melb),
pacific_time=TruncTime("start_datetime", tzinfo=pacific),
)
.order_by("start_datetime")
.get()
)
melb_start_datetime = start_datetime.astimezone(melb)
pacific_start_datetime = start_datetime.astimezone(pacific)
self.assertEqual(model.start_datetime, start_datetime)
self.assertEqual(
model.melb_year, truncate_to(start_datetime, "year", melb)
)
self.assertEqual(
model.pacific_year, truncate_to(start_datetime, "year", pacific)
)
self.assertEqual(model.start_datetime.year, 2016)
self.assertEqual(model.melb_year.year, 2016)
self.assertEqual(model.pacific_year.year, 2015)
self.assertEqual(model.melb_date, melb_start_datetime.date())
self.assertEqual(model.pacific_date, pacific_start_datetime.date())
self.assertEqual(model.melb_time, melb_start_datetime.time())
self.assertEqual(model.pacific_time, pacific_start_datetime.time())
@needs_pytz
@ignore_warnings(category=RemovedInDjango50Warning)
def test_trunc_ambiguous_and_invalid_times(self):
sao = pytz.timezone("America/Sao_Paulo")
start_datetime = datetime(2016, 10, 16, 13, tzinfo=datetime_timezone.utc)
end_datetime = datetime(2016, 2, 21, 1, tzinfo=datetime_timezone.utc)
self.create_model(start_datetime, end_datetime)
with timezone.override(sao):
with self.assertRaisesMessage(
pytz.NonExistentTimeError, "2016-10-16 00:00:00"
):
model = DTModel.objects.annotate(
truncated_start=TruncDay("start_datetime")
).get()
with self.assertRaisesMessage(
pytz.AmbiguousTimeError, "2016-02-20 23:00:00"
):
model = DTModel.objects.annotate(
truncated_end=TruncHour("end_datetime")
).get()
model = DTModel.objects.annotate(
truncated_start=TruncDay("start_datetime", is_dst=False),
truncated_end=TruncHour("end_datetime", is_dst=False),
).get()
self.assertEqual(model.truncated_start.dst(), timedelta(0))
self.assertEqual(model.truncated_end.dst(), timedelta(0))
model = DTModel.objects.annotate(
truncated_start=TruncDay("start_datetime", is_dst=True),
truncated_end=TruncHour("end_datetime", is_dst=True),
).get()
self.assertEqual(model.truncated_start.dst(), timedelta(0, 3600))
self.assertEqual(model.truncated_end.dst(), timedelta(0, 3600))
def test_trunc_func_with_timezone(self):
"""
        If the truncated datetime transitions to a different offset (daylight
        saving), then the returned value will have that new timezone/offset.
"""
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321)
end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123)
start_datetime = timezone.make_aware(start_datetime)
end_datetime = timezone.make_aware(end_datetime)
self.create_model(start_datetime, end_datetime)
self.create_model(end_datetime, start_datetime)
for melb in self.get_timezones("Australia/Melbourne"):
with self.subTest(repr(melb)):
def test_datetime_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc(
"start_datetime",
kind,
output_field=DateTimeField(),
tzinfo=melb,
)
).order_by("start_datetime"),
[
(
start_datetime,
truncate_to(
start_datetime.astimezone(melb), kind, melb
),
),
(
end_datetime,
truncate_to(end_datetime.astimezone(melb), kind, melb),
),
],
lambda m: (m.start_datetime, m.truncated),
)
def test_datetime_to_date_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc(
"start_datetime",
kind,
output_field=DateField(),
tzinfo=melb,
),
).order_by("start_datetime"),
[
(
start_datetime,
truncate_to(
start_datetime.astimezone(melb).date(), kind
),
),
(
end_datetime,
truncate_to(end_datetime.astimezone(melb).date(), kind),
),
],
lambda m: (m.start_datetime, m.truncated),
)
def test_datetime_to_time_kind(kind):
self.assertQuerySetEqual(
DTModel.objects.annotate(
truncated=Trunc(
"start_datetime",
kind,
output_field=TimeField(),
tzinfo=melb,
)
).order_by("start_datetime"),
[
(
start_datetime,
truncate_to(
start_datetime.astimezone(melb).time(), kind
),
),
(
end_datetime,
truncate_to(end_datetime.astimezone(melb).time(), kind),
),
],
lambda m: (m.start_datetime, m.truncated),
)
test_datetime_to_date_kind("year")
test_datetime_to_date_kind("quarter")
test_datetime_to_date_kind("month")
test_datetime_to_date_kind("week")
test_datetime_to_date_kind("day")
test_datetime_to_time_kind("hour")
test_datetime_to_time_kind("minute")
test_datetime_to_time_kind("second")
test_datetime_kind("year")
test_datetime_kind("quarter")
test_datetime_kind("month")
test_datetime_kind("week")
test_datetime_kind("day")
test_datetime_kind("hour")
test_datetime_kind("minute")
test_datetime_kind("second")
qs = DTModel.objects.filter(
start_datetime__date=Trunc(
"start_datetime", "day", output_field=DateField()
)
)
self.assertEqual(qs.count(), 2)
def test_trunc_invalid_field_with_timezone(self):
for melb in self.get_timezones("Australia/Melbourne"):
with self.subTest(repr(melb)):
msg = "tzinfo can only be used with DateTimeField."
with self.assertRaisesMessage(ValueError, msg):
DTModel.objects.annotate(
day_melb=Trunc("start_date", "day", tzinfo=melb),
).get()
with self.assertRaisesMessage(ValueError, msg):
DTModel.objects.annotate(
hour_melb=Trunc("start_time", "hour", tzinfo=melb),
).get()
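# An illustrative sketch (an assumption, not part of the original suite) of
# the behavior the tz-aware tests above verify: Trunc* converts to the target
# timezone *before* truncating, so one instant can land in different years
# per zone, and the result carries that zone's (possibly DST-shifted) offset.
# Assumes Python 3.9+ zoneinfo; trunc_year_in is a hypothetical helper name.
from zoneinfo import ZoneInfo  # noqa: E402 (sketch placed mid-module)

def trunc_year_in(dt, tz):
    local = dt.astimezone(tz)
    return local.replace(
        month=1, day=1, hour=0, minute=0, second=0, microsecond=0
    )

_new_year_utc = datetime(2016, 1, 1, 1, 30, tzinfo=datetime_timezone.utc)
# Melbourne (UTC+11 in January) is already in 2016, while Los Angeles (UTC-8)
# is still in 2015.
assert trunc_year_in(_new_year_utc, ZoneInfo("Australia/Melbourne")).year == 2016
assert trunc_year_in(_new_year_utc, ZoneInfo("America/Los_Angeles")).year == 2015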
|
d554f81eb8e08502ee85035078dadbbd0d349378972f2669e4f763fad0326784 | import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse, HttpResponsePermanentRedirect
from django.middleware.locale import LocaleMiddleware
from django.template import Context, Template
from django.test import SimpleTestCase, override_settings
from django.test.client import RequestFactory
from django.test.utils import override_script_prefix
from django.urls import clear_url_caches, resolve, reverse, translate_url
from django.utils import translation
class PermanentRedirectLocaleMiddleWare(LocaleMiddleware):
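    # LocaleMiddleware issues temporary (302) redirects by default through
    # its response_redirect_class attribute; overriding it here makes the
    # locale redirect permanent (301), as test_custom_redirect_class verifies.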
response_redirect_class = HttpResponsePermanentRedirect
@override_settings(
USE_I18N=True,
LOCALE_PATHS=[
os.path.join(os.path.dirname(__file__), "locale"),
],
LANGUAGE_CODE="en-us",
LANGUAGES=[
("nl", "Dutch"),
("en", "English"),
("pt-br", "Brazilian Portuguese"),
],
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
],
ROOT_URLCONF="i18n.patterns.urls.default",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
"OPTIONS": {
"context_processors": [
"django.template.context_processors.i18n",
],
},
}
],
)
class URLTestCaseBase(SimpleTestCase):
"""
    Base TestCase class for the URL tests.
"""
def setUp(self):
        # Make sure the cache is empty before running the tests.
clear_url_caches()
def tearDown(self):
        # Leave an empty cache behind for the other test cases.
clear_url_caches()
class URLPrefixTests(URLTestCaseBase):
"""
    Tests that `i18n_patterns` adds the language prefix correctly.
"""
def test_not_prefixed(self):
with translation.override("en"):
self.assertEqual(reverse("not-prefixed"), "/not-prefixed/")
self.assertEqual(
reverse("not-prefixed-included-url"), "/not-prefixed-include/foo/"
)
with translation.override("nl"):
self.assertEqual(reverse("not-prefixed"), "/not-prefixed/")
self.assertEqual(
reverse("not-prefixed-included-url"), "/not-prefixed-include/foo/"
)
def test_prefixed(self):
with translation.override("en"):
self.assertEqual(reverse("prefixed"), "/en/prefixed/")
with translation.override("nl"):
self.assertEqual(reverse("prefixed"), "/nl/prefixed/")
with translation.override(None):
self.assertEqual(
reverse("prefixed"), "/%s/prefixed/" % settings.LANGUAGE_CODE
)
@override_settings(ROOT_URLCONF="i18n.patterns.urls.wrong")
def test_invalid_prefix_use(self):
msg = "Using i18n_patterns in an included URLconf is not allowed."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
reverse("account:register")
@override_settings(ROOT_URLCONF="i18n.patterns.urls.disabled")
class URLDisabledTests(URLTestCaseBase):
@override_settings(USE_I18N=False)
def test_prefixed_i18n_disabled(self):
with translation.override("en"):
self.assertEqual(reverse("prefixed"), "/prefixed/")
with translation.override("nl"):
self.assertEqual(reverse("prefixed"), "/prefixed/")
class RequestURLConfTests(SimpleTestCase):
@override_settings(ROOT_URLCONF="i18n.patterns.urls.path_unused")
def test_request_urlconf_considered(self):
request = RequestFactory().get("/nl/")
request.urlconf = "i18n.patterns.urls.default"
middleware = LocaleMiddleware(lambda req: HttpResponse())
with translation.override("nl"):
middleware.process_request(request)
self.assertEqual(request.LANGUAGE_CODE, "nl")
@override_settings(ROOT_URLCONF="i18n.patterns.urls.path_unused")
class PathUnusedTests(URLTestCaseBase):
"""
    If no i18n_patterns is used in the root URLconf, then no language
    activation happens based on the URL prefix.
"""
def test_no_lang_activate(self):
response = self.client.get("/nl/foo/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers["content-language"], "en")
self.assertEqual(response.context["LANGUAGE_CODE"], "en")
class URLTranslationTests(URLTestCaseBase):
"""
    Tests that the pattern strings are translated correctly (both within
    `i18n_patterns` and in the normal, unprefixed URL patterns).
"""
def test_no_prefix_translated(self):
with translation.override("en"):
self.assertEqual(reverse("no-prefix-translated"), "/translated/")
self.assertEqual(
reverse("no-prefix-translated-slug", kwargs={"slug": "yeah"}),
"/translated/yeah/",
)
with translation.override("nl"):
self.assertEqual(reverse("no-prefix-translated"), "/vertaald/")
self.assertEqual(
reverse("no-prefix-translated-slug", kwargs={"slug": "yeah"}),
"/vertaald/yeah/",
)
with translation.override("pt-br"):
self.assertEqual(reverse("no-prefix-translated"), "/traduzidos/")
self.assertEqual(
reverse("no-prefix-translated-slug", kwargs={"slug": "yeah"}),
"/traduzidos/yeah/",
)
def test_users_url(self):
with translation.override("en"):
self.assertEqual(reverse("users"), "/en/users/")
with translation.override("nl"):
self.assertEqual(reverse("users"), "/nl/gebruikers/")
self.assertEqual(reverse("prefixed_xml"), "/nl/prefixed.xml")
with translation.override("pt-br"):
self.assertEqual(reverse("users"), "/pt-br/usuarios/")
def test_translate_url_utility(self):
with translation.override("en"):
self.assertEqual(
translate_url("/en/nonexistent/", "nl"), "/en/nonexistent/"
)
self.assertEqual(translate_url("/en/users/", "nl"), "/nl/gebruikers/")
# Namespaced URL
self.assertEqual(
translate_url("/en/account/register/", "nl"), "/nl/profiel/registreren/"
)
# path() URL pattern
self.assertEqual(
translate_url("/en/account/register-as-path/", "nl"),
"/nl/profiel/registreren-als-pad/",
)
self.assertEqual(translation.get_language(), "en")
# URL with parameters.
self.assertEqual(
translate_url("/en/with-arguments/regular-argument/", "nl"),
"/nl/with-arguments/regular-argument/",
)
self.assertEqual(
translate_url(
"/en/with-arguments/regular-argument/optional.html", "nl"
),
"/nl/with-arguments/regular-argument/optional.html",
)
with translation.override("nl"):
self.assertEqual(translate_url("/nl/gebruikers/", "en"), "/en/users/")
self.assertEqual(translation.get_language(), "nl")
def test_reverse_translated_with_captured_kwargs(self):
with translation.override("en"):
match = resolve("/translated/apo/")
# Links to the same page in other languages.
tests = [
("nl", "/vertaald/apo/"),
("pt-br", "/traduzidos/apo/"),
]
for lang, expected_link in tests:
with translation.override(lang):
self.assertEqual(
reverse(
match.url_name, args=match.args, kwargs=match.captured_kwargs
),
expected_link,
)
    def test_locale_not_interpreted_as_regex(self):
with translation.override("e("):
# Would previously error:
# re.error: missing ), unterminated subpattern at position 1
reverse("users")
class URLNamespaceTests(URLTestCaseBase):
"""
    Tests that translations still work within namespaces.
"""
def test_account_register(self):
with translation.override("en"):
self.assertEqual(reverse("account:register"), "/en/account/register/")
self.assertEqual(
reverse("account:register-as-path"), "/en/account/register-as-path/"
)
with translation.override("nl"):
self.assertEqual(reverse("account:register"), "/nl/profiel/registreren/")
self.assertEqual(
reverse("account:register-as-path"), "/nl/profiel/registreren-als-pad/"
)
class URLRedirectTests(URLTestCaseBase):
"""
    Tests that the user is redirected to the right URL when there is no
    language prefix in the request URL.
"""
def test_no_prefix_response(self):
response = self.client.get("/not-prefixed/")
self.assertEqual(response.status_code, 200)
def test_en_redirect(self):
response = self.client.get(
"/account/register/", headers={"accept-language": "en"}
)
self.assertRedirects(response, "/en/account/register/")
response = self.client.get(response.headers["location"])
self.assertEqual(response.status_code, 200)
def test_en_redirect_wrong_url(self):
response = self.client.get(
"/profiel/registreren/", headers={"accept-language": "en"}
)
self.assertEqual(response.status_code, 404)
def test_nl_redirect(self):
response = self.client.get(
"/profiel/registreren/", headers={"accept-language": "nl"}
)
self.assertRedirects(response, "/nl/profiel/registreren/")
response = self.client.get(response.headers["location"])
self.assertEqual(response.status_code, 200)
def test_nl_redirect_wrong_url(self):
response = self.client.get(
"/account/register/", headers={"accept-language": "nl"}
)
self.assertEqual(response.status_code, 404)
def test_pt_br_redirect(self):
response = self.client.get(
"/conta/registre-se/", headers={"accept-language": "pt-br"}
)
self.assertRedirects(response, "/pt-br/conta/registre-se/")
response = self.client.get(response.headers["location"])
self.assertEqual(response.status_code, 200)
def test_pl_pl_redirect(self):
# language from outside of the supported LANGUAGES list
response = self.client.get(
"/account/register/", headers={"accept-language": "pl-pl"}
)
self.assertRedirects(response, "/en/account/register/")
response = self.client.get(response.headers["location"])
self.assertEqual(response.status_code, 200)
@override_settings(
MIDDLEWARE=[
"i18n.patterns.tests.PermanentRedirectLocaleMiddleWare",
"django.middleware.common.CommonMiddleware",
],
)
def test_custom_redirect_class(self):
response = self.client.get(
"/account/register/", headers={"accept-language": "en"}
)
self.assertRedirects(response, "/en/account/register/", 301)
class URLVaryAcceptLanguageTests(URLTestCaseBase):
"""
'Accept-Language' is not added to the Vary header when using prefixed URLs.
"""
def test_no_prefix_response(self):
response = self.client.get("/not-prefixed/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get("Vary"), "Accept-Language")
def test_en_redirect(self):
"""
        The redirect to a prefixed URL depends on 'Accept-Language' and
        'Cookie', but once prefixed, no Vary header is set.
"""
response = self.client.get(
"/account/register/", headers={"accept-language": "en"}
)
self.assertRedirects(response, "/en/account/register/")
self.assertEqual(response.get("Vary"), "Accept-Language, Cookie")
response = self.client.get(response.headers["location"])
self.assertEqual(response.status_code, 200)
self.assertFalse(response.get("Vary"))
class URLRedirectWithoutTrailingSlashTests(URLTestCaseBase):
"""
Tests the redirect when the requested URL doesn't end with a slash
(`settings.APPEND_SLASH=True`).
"""
def test_not_prefixed_redirect(self):
response = self.client.get("/not-prefixed", headers={"accept-language": "en"})
self.assertRedirects(response, "/not-prefixed/", 301)
def test_en_redirect(self):
response = self.client.get(
"/account/register", headers={"accept-language": "en"}, follow=True
)
# We only want one redirect, bypassing CommonMiddleware
self.assertEqual(response.redirect_chain, [("/en/account/register/", 302)])
self.assertRedirects(response, "/en/account/register/", 302)
response = self.client.get(
"/prefixed.xml", headers={"accept-language": "en"}, follow=True
)
self.assertRedirects(response, "/en/prefixed.xml", 302)
class URLRedirectWithoutTrailingSlashSettingTests(URLTestCaseBase):
"""
Tests the redirect when the requested URL doesn't end with a slash
(`settings.APPEND_SLASH=False`).
"""
@override_settings(APPEND_SLASH=False)
def test_not_prefixed_redirect(self):
response = self.client.get("/not-prefixed", headers={"accept-language": "en"})
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=False)
def test_en_redirect(self):
response = self.client.get(
"/account/register-without-slash", headers={"accept-language": "en"}
)
self.assertRedirects(response, "/en/account/register-without-slash", 302)
response = self.client.get(response.headers["location"])
self.assertEqual(response.status_code, 200)
class URLResponseTests(URLTestCaseBase):
"""Tests if the response has the correct language code."""
def test_not_prefixed_with_prefix(self):
response = self.client.get("/en/not-prefixed/")
self.assertEqual(response.status_code, 404)
def test_en_url(self):
response = self.client.get("/en/account/register/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers["content-language"], "en")
self.assertEqual(response.context["LANGUAGE_CODE"], "en")
def test_nl_url(self):
response = self.client.get("/nl/profiel/registreren/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers["content-language"], "nl")
self.assertEqual(response.context["LANGUAGE_CODE"], "nl")
def test_wrong_en_prefix(self):
response = self.client.get("/en/profiel/registreren/")
self.assertEqual(response.status_code, 404)
def test_wrong_nl_prefix(self):
response = self.client.get("/nl/account/register/")
self.assertEqual(response.status_code, 404)
def test_pt_br_url(self):
response = self.client.get("/pt-br/conta/registre-se/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers["content-language"], "pt-br")
self.assertEqual(response.context["LANGUAGE_CODE"], "pt-br")
def test_en_path(self):
response = self.client.get("/en/account/register-as-path/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers["content-language"], "en")
self.assertEqual(response.context["LANGUAGE_CODE"], "en")
def test_nl_path(self):
response = self.client.get("/nl/profiel/registreren-als-pad/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers["content-language"], "nl")
self.assertEqual(response.context["LANGUAGE_CODE"], "nl")
class URLRedirectWithScriptAliasTests(URLTestCaseBase):
"""
#21579 - LocaleMiddleware should respect the script prefix.
"""
def test_language_prefix_with_script_prefix(self):
prefix = "/script_prefix"
with override_script_prefix(prefix):
response = self.client.get(
"/prefixed/", headers={"accept-language": "en"}, SCRIPT_NAME=prefix
)
self.assertRedirects(
response, "%s/en/prefixed/" % prefix, target_status_code=404
)
class URLTagTests(URLTestCaseBase):
"""
    Tests that the {% language %} template tag works.
"""
def test_strings_only(self):
t = Template(
"""{% load i18n %}
{% language 'nl' %}{% url 'no-prefix-translated' %}{% endlanguage %}
{% language 'pt-br' %}{% url 'no-prefix-translated' %}{% endlanguage %}"""
)
self.assertEqual(
t.render(Context({})).strip().split(), ["/vertaald/", "/traduzidos/"]
)
def test_context(self):
ctx = Context({"lang1": "nl", "lang2": "pt-br"})
tpl = Template(
"""{% load i18n %}
{% language lang1 %}{% url 'no-prefix-translated' %}{% endlanguage %}
{% language lang2 %}{% url 'no-prefix-translated' %}{% endlanguage %}"""
)
self.assertEqual(
tpl.render(ctx).strip().split(), ["/vertaald/", "/traduzidos/"]
)
def test_args(self):
tpl = Template(
"""
{% load i18n %}
{% language 'nl' %}
{% url 'no-prefix-translated-slug' 'apo' %}{% endlanguage %}
{% language 'pt-br' %}
{% url 'no-prefix-translated-slug' 'apo' %}{% endlanguage %}
"""
)
self.assertEqual(
tpl.render(Context({})).strip().split(),
["/vertaald/apo/", "/traduzidos/apo/"],
)
def test_kwargs(self):
tpl = Template(
"""
{% load i18n %}
{% language 'nl' %}
{% url 'no-prefix-translated-slug' slug='apo' %}{% endlanguage %}
{% language 'pt-br' %}
{% url 'no-prefix-translated-slug' slug='apo' %}{% endlanguage %}
"""
)
self.assertEqual(
tpl.render(Context({})).strip().split(),
["/vertaald/apo/", "/traduzidos/apo/"],
)
|
0f7608cee2fa19a65749c2f4f9f27504f8a4c6af20e6156afc487a9b6eb5a579 | from django.utils.version import get_version
VERSION = (5, 0, 0, "alpha", 0)
__version__ = get_version(VERSION)
def setup(set_prefix=True):
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
Set the thread-local urlresolvers script prefix if `set_prefix` is True.
"""
from django.apps import apps
from django.conf import settings
from django.urls import set_script_prefix
from django.utils.log import configure_logging
configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
if set_prefix:
set_script_prefix(
"/" if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
)
apps.populate(settings.INSTALLED_APPS)
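# An illustrative usage sketch (not executed here): standalone scripts
# typically point DJANGO_SETTINGS_MODULE at a settings module and then call
# setup() before touching the ORM or other configured machinery;
# "myproject.settings" is a hypothetical placeholder.
#
#     import os
#     import django
#
#     os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")
#     django.setup()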
|
fa905a740b42c7882b74a72aa44e6e309666b0b5bc4d497912c90da3800b64dd | #!/usr/bin/env python
import argparse
import atexit
import copy
import gc
import multiprocessing
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import warnings
from pathlib import Path
try:
import django
except ImportError as e:
raise RuntimeError(
"Django module not found, reference tests/README.rst for instructions."
) from e
else:
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, connections
from django.test import TestCase, TransactionTestCase
from django.test.runner import get_max_test_processes, parallel_type
from django.test.selenium import SeleniumTestCaseBase
from django.test.utils import NullTimeKeeper, TimeKeeper, get_runner
from django.utils.deprecation import (
RemovedInDjango51Warning,
RemovedInDjango60Warning,
)
from django.utils.log import DEFAULT_LOGGING
try:
import MySQLdb
except ImportError:
pass
else:
# Ignore informational warnings from QuerySet.explain().
warnings.filterwarnings("ignore", r"\(1003, *", category=MySQLdb.Warning)
# Make deprecation warnings errors to ensure no usage of deprecated features.
warnings.simplefilter("error", RemovedInDjango60Warning)
warnings.simplefilter("error", RemovedInDjango51Warning)
# Make resource and runtime warnings errors to ensure no usage of error-prone
# patterns.
warnings.simplefilter("error", ResourceWarning)
warnings.simplefilter("error", RuntimeWarning)
# Ignore known warnings in test dependencies.
warnings.filterwarnings(
"ignore", "'U' mode is deprecated", DeprecationWarning, module="docutils.io"
)
# Reduce garbage collection frequency to improve performance. Since CPython
# uses refcounting, garbage collection only collects objects with cyclic
# references, which are a minority, so the garbage collection threshold can be
# larger than the default threshold of 700 allocations + deallocations without
# much increase in memory usage.
gc.set_threshold(100_000)
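# For comparison, CPython's default is gc.get_threshold() == (700, 10, 10);
# only the first-generation threshold is raised here.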
RUNTESTS_DIR = os.path.abspath(os.path.dirname(__file__))
TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, "templates")
# Create a specific subdirectory for the duration of the test suite.
TMPDIR = tempfile.mkdtemp(prefix="django_")
# Set the TMPDIR environment variable in addition to tempfile.tempdir
# so that children processes inherit it.
tempfile.tempdir = os.environ["TMPDIR"] = TMPDIR
# Remove the temporary TMPDIR at interpreter exit.
atexit.register(shutil.rmtree, TMPDIR)
# This is a dict mapping RUNTESTS_DIR subdirectory to subdirectories of that
# directory to skip when searching for test modules.
SUBDIRS_TO_SKIP = {
"": {"import_error_package", "test_runner_apps"},
"gis_tests": {"data"},
}
ALWAYS_INSTALLED_APPS = [
"django.contrib.contenttypes",
"django.contrib.auth",
"django.contrib.sites",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.admin.apps.SimpleAdminConfig",
"django.contrib.staticfiles",
]
ALWAYS_MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
]
# Need to add the associated contrib app to INSTALLED_APPS in some cases to
# avoid "RuntimeError: Model class X doesn't declare an explicit app_label
# and isn't in an application in INSTALLED_APPS."
CONTRIB_TESTS_TO_APPS = {
"deprecation": ["django.contrib.flatpages", "django.contrib.redirects"],
"flatpages_tests": ["django.contrib.flatpages"],
"redirects_tests": ["django.contrib.redirects"],
}
def get_test_modules(gis_enabled):
"""
Scan the tests directory and yield the names of all test modules.
The yielded names have either one dotted part like "test_runner" or, in
the case of GIS tests, two dotted parts like "gis_tests.gdal_tests".
"""
discovery_dirs = [""]
if gis_enabled:
# GIS tests are in nested apps
discovery_dirs.append("gis_tests")
else:
SUBDIRS_TO_SKIP[""].add("gis_tests")
for dirname in discovery_dirs:
dirpath = os.path.join(RUNTESTS_DIR, dirname)
subdirs_to_skip = SUBDIRS_TO_SKIP[dirname]
with os.scandir(dirpath) as entries:
for f in entries:
if (
"." in f.name
or os.path.basename(f.name) in subdirs_to_skip
or f.is_file()
or not os.path.exists(os.path.join(f.path, "__init__.py"))
):
continue
test_module = f.name
if dirname:
test_module = dirname + "." + test_module
yield test_module
def get_label_module(label):
"""Return the top-level module part for a test label."""
path = Path(label)
if len(path.parts) == 1:
# Interpret the label as a dotted module name.
return label.split(".")[0]
# Otherwise, interpret the label as a path. Check existence first to
# provide a better error message than relative_to() if it doesn't exist.
if not path.exists():
raise RuntimeError(f"Test label path {label} does not exist")
path = path.resolve()
rel_path = path.relative_to(RUNTESTS_DIR)
return rel_path.parts[0]
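# For example, a dotted label such as "i18n.tests.TranslationTests" reduces
# to "i18n", while a path label such as "gis_tests/gdal_tests" resolves to
# "gis_tests".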
def get_filtered_test_modules(start_at, start_after, gis_enabled, test_labels=None):
if test_labels is None:
test_labels = []
# Reduce each test label to just the top-level module part.
label_modules = set()
for label in test_labels:
test_module = get_label_module(label)
label_modules.add(test_module)
    # It would be nice to put this validation earlier, but it must come after
    # django.setup() so that connection.features.gis_enabled can be accessed.
if "gis_tests" in label_modules and not gis_enabled:
print("Aborting: A GIS database backend is required to run gis_tests.")
sys.exit(1)
def _module_match_label(module_name, label):
# Exact or ancestor match.
return module_name == label or module_name.startswith(label + ".")
start_label = start_at or start_after
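    # --start-after skips modules up to and including the named one, while
    # --start-at begins the run with it.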
for test_module in get_test_modules(gis_enabled):
if start_label:
if not _module_match_label(test_module, start_label):
continue
start_label = ""
if not start_at:
assert start_after
# Skip the current one before starting.
continue
# If the module (or an ancestor) was named on the command line, or
# no modules were named (i.e., run all), include the test module.
if not test_labels or any(
_module_match_label(test_module, label_module)
for label_module in label_modules
):
yield test_module
def setup_collect_tests(start_at, start_after, test_labels=None):
state = {
"INSTALLED_APPS": settings.INSTALLED_APPS,
"ROOT_URLCONF": getattr(settings, "ROOT_URLCONF", ""),
"TEMPLATES": settings.TEMPLATES,
"LANGUAGE_CODE": settings.LANGUAGE_CODE,
"STATIC_URL": settings.STATIC_URL,
"STATIC_ROOT": settings.STATIC_ROOT,
"MIDDLEWARE": settings.MIDDLEWARE,
}
# Redirect some settings for the duration of these tests.
settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
settings.ROOT_URLCONF = "urls"
settings.STATIC_URL = "static/"
settings.STATIC_ROOT = os.path.join(TMPDIR, "static")
settings.TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_DIR],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
]
settings.LANGUAGE_CODE = "en"
settings.SITE_ID = 1
settings.MIDDLEWARE = ALWAYS_MIDDLEWARE
settings.MIGRATION_MODULES = {
# This lets us skip creating migrations for the test models as many of
# them depend on one of the following contrib applications.
"auth": None,
"contenttypes": None,
"sessions": None,
}
log_config = copy.deepcopy(DEFAULT_LOGGING)
# Filter out non-error logging so we don't have to capture it in lots of
# tests.
log_config["loggers"]["django"]["level"] = "ERROR"
settings.LOGGING = log_config
settings.SILENCED_SYSTEM_CHECKS = [
"fields.W342", # ForeignKey(unique=True) -> OneToOneField
# django.contrib.postgres.fields.CICharField deprecated.
"fields.W905",
"postgres.W004",
# django.contrib.postgres.fields.CIEmailField deprecated.
"fields.W906",
# django.contrib.postgres.fields.CITextField deprecated.
"fields.W907",
]
# Load all the ALWAYS_INSTALLED_APPS.
django.setup()
# This flag must be evaluated after django.setup() because otherwise it can
# raise AppRegistryNotReady when running gis_tests in isolation on some
# backends (e.g. PostGIS).
gis_enabled = connection.features.gis_enabled
test_modules = list(
get_filtered_test_modules(
start_at,
start_after,
gis_enabled,
test_labels=test_labels,
)
)
return test_modules, state
def teardown_collect_tests(state):
# Restore the old settings.
for key, value in state.items():
setattr(settings, key, value)
def get_installed():
return [app_config.name for app_config in apps.get_app_configs()]
# This function should be called only after calling django.setup(),
# since it calls connection.features.gis_enabled.
def get_apps_to_install(test_modules):
for test_module in test_modules:
if test_module in CONTRIB_TESTS_TO_APPS:
yield from CONTRIB_TESTS_TO_APPS[test_module]
yield test_module
# Add contrib.gis to INSTALLED_APPS if needed (rather than requiring
    # @override_settings(INSTALLED_APPS=...) on all test cases).
if connection.features.gis_enabled:
yield "django.contrib.gis"
def setup_run_tests(verbosity, start_at, start_after, test_labels=None):
test_modules, state = setup_collect_tests(
start_at, start_after, test_labels=test_labels
)
installed_apps = set(get_installed())
for app in get_apps_to_install(test_modules):
if app in installed_apps:
continue
if verbosity >= 2:
print(f"Importing application {app}")
settings.INSTALLED_APPS.append(app)
installed_apps.add(app)
apps.set_installed_apps(settings.INSTALLED_APPS)
# Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self):
raise Exception(
"Please define available_apps in TransactionTestCase and its subclasses."
)
TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None
# Set an environment variable that other code may consult to see if
# Django's own test suite is running.
os.environ["RUNNING_DJANGOS_TEST_SUITE"] = "true"
test_labels = test_labels or test_modules
return test_labels, state
def teardown_run_tests(state):
teardown_collect_tests(state)
# Discard the multiprocessing.util finalizer that tries to remove a
# temporary directory that's already removed by this script's
# atexit.register(shutil.rmtree, TMPDIR) handler. Prevents
# FileNotFoundError at the end of a test run (#27890).
from multiprocessing.util import _finalizer_registry
_finalizer_registry.pop((-100, 0), None)
del os.environ["RUNNING_DJANGOS_TEST_SUITE"]
class ActionSelenium(argparse.Action):
"""
Validate the comma-separated list of requested browsers.
"""
def __call__(self, parser, namespace, values, option_string=None):
try:
import selenium # NOQA
except ImportError as e:
raise ImproperlyConfigured(f"Error loading selenium module: {e}")
browsers = values.split(",")
for browser in browsers:
try:
SeleniumTestCaseBase.import_webdriver(browser)
except ImportError:
raise argparse.ArgumentError(
self, "Selenium browser specification '%s' is not valid." % browser
)
setattr(namespace, self.dest, browsers)
def django_tests(
verbosity,
interactive,
failfast,
keepdb,
reverse,
test_labels,
debug_sql,
parallel,
tags,
exclude_tags,
test_name_patterns,
start_at,
start_after,
pdb,
buffer,
timing,
shuffle,
):
if parallel in {0, "auto"}:
max_parallel = get_max_test_processes()
else:
max_parallel = parallel
if verbosity >= 1:
msg = "Testing against Django installed in '%s'" % os.path.dirname(
django.__file__
)
if max_parallel > 1:
msg += " with up to %d processes" % max_parallel
print(msg)
process_setup_args = (verbosity, start_at, start_after, test_labels)
test_labels, state = setup_run_tests(*process_setup_args)
# Run the test suite, including the extra validation tests.
if not hasattr(settings, "TEST_RUNNER"):
settings.TEST_RUNNER = "django.test.runner.DiscoverRunner"
if parallel in {0, "auto"}:
# This doesn't work before django.setup() on some databases.
if all(conn.features.can_clone_databases for conn in connections.all()):
parallel = max_parallel
else:
parallel = 1
TestRunner = get_runner(settings)
TestRunner.parallel_test_suite.process_setup = setup_run_tests
TestRunner.parallel_test_suite.process_setup_args = process_setup_args
test_runner = TestRunner(
verbosity=verbosity,
interactive=interactive,
failfast=failfast,
keepdb=keepdb,
reverse=reverse,
debug_sql=debug_sql,
parallel=parallel,
tags=tags,
exclude_tags=exclude_tags,
test_name_patterns=test_name_patterns,
pdb=pdb,
buffer=buffer,
timing=timing,
shuffle=shuffle,
)
failures = test_runner.run_tests(test_labels)
teardown_run_tests(state)
return failures
def collect_test_modules(start_at, start_after):
test_modules, state = setup_collect_tests(start_at, start_after)
teardown_collect_tests(state)
return test_modules
def get_subprocess_args(options):
subprocess_args = [sys.executable, __file__, "--settings=%s" % options.settings]
if options.failfast:
subprocess_args.append("--failfast")
if options.verbosity:
subprocess_args.append("--verbosity=%s" % options.verbosity)
if not options.interactive:
subprocess_args.append("--noinput")
if options.tags:
subprocess_args.append("--tag=%s" % options.tags)
if options.exclude_tags:
subprocess_args.append("--exclude_tag=%s" % options.exclude_tags)
if options.shuffle is not False:
if options.shuffle is None:
subprocess_args.append("--shuffle")
else:
subprocess_args.append("--shuffle=%s" % options.shuffle)
return subprocess_args
def bisect_tests(bisection_label, options, test_labels, start_at, start_after):
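    # Repeatedly split the test list in half, run each half together with the
    # named bisection label, and recurse into whichever half still fails,
    # until a single culprit module remains.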
if not test_labels:
test_labels = collect_test_modules(start_at, start_after)
print("***** Bisecting test suite: %s" % " ".join(test_labels))
# Make sure the bisection point isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [bisection_label, "model_inheritance_same_model_name"]:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = get_subprocess_args(options)
iteration = 1
while len(test_labels) > 1:
midpoint = len(test_labels) // 2
test_labels_a = test_labels[:midpoint] + [bisection_label]
test_labels_b = test_labels[midpoint:] + [bisection_label]
print("***** Pass %da: Running the first half of the test suite" % iteration)
print("***** Test labels: %s" % " ".join(test_labels_a))
failures_a = subprocess.run(subprocess_args + test_labels_a)
print("***** Pass %db: Running the second half of the test suite" % iteration)
print("***** Test labels: %s" % " ".join(test_labels_b))
print("")
failures_b = subprocess.run(subprocess_args + test_labels_b)
if failures_a.returncode and not failures_b.returncode:
print("***** Problem found in first half. Bisecting again...")
iteration += 1
test_labels = test_labels_a[:-1]
elif failures_b.returncode and not failures_a.returncode:
print("***** Problem found in second half. Bisecting again...")
iteration += 1
test_labels = test_labels_b[:-1]
elif failures_a.returncode and failures_b.returncode:
print("***** Multiple sources of failure found")
break
else:
print("***** No source of failure found... try pair execution (--pair)")
break
if len(test_labels) == 1:
print("***** Source of error: %s" % test_labels[0])
def paired_tests(paired_test, options, test_labels, start_at, start_after):
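    # Run each candidate module together with the named test, one pair per
    # subprocess, and stop at the first pairing that fails.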
if not test_labels:
test_labels = collect_test_modules(start_at, start_after)
print("***** Trying paired execution")
# Make sure the constant member of the pair isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [paired_test, "model_inheritance_same_model_name"]:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = get_subprocess_args(options)
for i, label in enumerate(test_labels):
print(
"***** %d of %d: Check test pairing with %s"
% (i + 1, len(test_labels), label)
)
failures = subprocess.call(subprocess_args + [label, paired_test])
if failures:
print("***** Found problem pair with %s" % label)
return
print("***** No problem pair found")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the Django test suite.")
parser.add_argument(
"modules",
nargs="*",
metavar="module",
help='Optional path(s) to test modules; e.g. "i18n" or '
'"i18n.tests.TranslationTests.test_lazy_objects".',
)
parser.add_argument(
"-v",
"--verbosity",
default=1,
type=int,
choices=[0, 1, 2, 3],
help="Verbosity level; 0=minimal output, 1=normal output, 2=all output",
)
parser.add_argument(
"--noinput",
action="store_false",
dest="interactive",
help="Tells Django to NOT prompt the user for input of any kind.",
)
parser.add_argument(
"--failfast",
action="store_true",
help="Tells Django to stop running the test suite after first failed test.",
)
parser.add_argument(
"--keepdb",
action="store_true",
help="Tells Django to preserve the test database between runs.",
)
parser.add_argument(
"--settings",
help='Python path to settings module, e.g. "myproject.settings". If '
"this isn't provided, either the DJANGO_SETTINGS_MODULE "
'environment variable or "test_sqlite" will be used.',
)
parser.add_argument(
"--bisect",
help="Bisect the test suite to discover a test that causes a test "
"failure when combined with the named test.",
)
parser.add_argument(
"--pair",
help="Run the test suite in pairs with the named test to find problem pairs.",
)
parser.add_argument(
"--shuffle",
nargs="?",
default=False,
type=int,
metavar="SEED",
help=(
"Shuffle the order of test cases to help check that tests are "
"properly isolated."
),
)
parser.add_argument(
"--reverse",
action="store_true",
help="Sort test suites and test cases in opposite order to debug "
"test side effects not apparent with normal execution lineup.",
)
parser.add_argument(
"--selenium",
action=ActionSelenium,
metavar="BROWSERS",
help="A comma-separated list of browsers to run the Selenium tests against.",
)
parser.add_argument(
"--headless",
action="store_true",
help="Run selenium tests in headless mode, if the browser supports the option.",
)
parser.add_argument(
"--selenium-hub",
help="A URL for a selenium hub instance to use in combination with --selenium.",
)
parser.add_argument(
"--external-host",
default=socket.gethostname(),
help=(
"The external host that can be reached by the selenium hub instance when "
"running Selenium tests via Selenium Hub."
),
)
parser.add_argument(
"--debug-sql",
action="store_true",
help="Turn on the SQL query logger within tests.",
)
# 0 is converted to "auto" or 1 later on, depending on a method used by
# multiprocessing to start subprocesses and on the backend support for
# cloning databases.
parser.add_argument(
"--parallel",
nargs="?",
const="auto",
default=0,
type=parallel_type,
metavar="N",
help=(
'Run tests using up to N parallel processes. Use the value "auto" '
"to run one test process for each processor core."
),
)
parser.add_argument(
"--tag",
dest="tags",
action="append",
help="Run only tests with the specified tags. Can be used multiple times.",
)
parser.add_argument(
"--exclude-tag",
dest="exclude_tags",
action="append",
help="Do not run tests with the specified tag. Can be used multiple times.",
)
parser.add_argument(
"--start-after",
dest="start_after",
help="Run tests starting after the specified top-level module.",
)
parser.add_argument(
"--start-at",
dest="start_at",
help="Run tests starting at the specified top-level module.",
)
parser.add_argument(
"--pdb", action="store_true", help="Runs the PDB debugger on error or failure."
)
parser.add_argument(
"-b",
"--buffer",
action="store_true",
help="Discard output of passing tests.",
)
parser.add_argument(
"--timing",
action="store_true",
help="Output timings, including database set up and total run time.",
)
parser.add_argument(
"-k",
dest="test_name_patterns",
action="append",
help=(
"Only run test methods and classes matching test name pattern. "
"Same as unittest -k option. Can be used multiple times."
),
)
options = parser.parse_args()
using_selenium_hub = options.selenium and options.selenium_hub
if options.selenium_hub and not options.selenium:
parser.error(
"--selenium-hub and --external-host require --selenium to be used."
)
if using_selenium_hub and not options.external_host:
parser.error("--selenium-hub and --external-host must be used together.")
# Allow including a trailing slash on app_labels for tab completion convenience
options.modules = [os.path.normpath(labels) for labels in options.modules]
mutually_exclusive_options = [
options.start_at,
options.start_after,
options.modules,
]
enabled_module_options = [
bool(option) for option in mutually_exclusive_options
].count(True)
if enabled_module_options > 1:
print(
"Aborting: --start-at, --start-after, and test labels are mutually "
"exclusive."
)
sys.exit(1)
for opt_name in ["start_at", "start_after"]:
opt_val = getattr(options, opt_name)
if opt_val:
if "." in opt_val:
print(
"Aborting: --%s must be a top-level module."
% opt_name.replace("_", "-")
)
sys.exit(1)
setattr(options, opt_name, os.path.normpath(opt_val))
if options.settings:
os.environ["DJANGO_SETTINGS_MODULE"] = options.settings
else:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite")
options.settings = os.environ["DJANGO_SETTINGS_MODULE"]
if options.selenium:
if multiprocessing.get_start_method() == "spawn" and options.parallel != 1:
parser.error(
"You cannot use --selenium with parallel tests on this system. "
"Pass --parallel=1 to use --selenium."
)
if not options.tags:
options.tags = ["selenium"]
elif "selenium" not in options.tags:
options.tags.append("selenium")
if options.selenium_hub:
SeleniumTestCaseBase.selenium_hub = options.selenium_hub
SeleniumTestCaseBase.external_host = options.external_host
SeleniumTestCaseBase.headless = options.headless
SeleniumTestCaseBase.browsers = options.selenium
if options.bisect:
bisect_tests(
options.bisect,
options,
options.modules,
options.start_at,
options.start_after,
)
elif options.pair:
paired_tests(
options.pair,
options,
options.modules,
options.start_at,
options.start_after,
)
else:
time_keeper = TimeKeeper() if options.timing else NullTimeKeeper()
with time_keeper.timed("Total run"):
failures = django_tests(
options.verbosity,
options.interactive,
options.failfast,
options.keepdb,
options.reverse,
options.modules,
options.debug_sql,
options.parallel,
options.tags,
options.exclude_tags,
getattr(options, "test_name_patterns", None),
options.start_at,
options.start_after,
options.pdb,
options.buffer,
options.timing,
options.shuffle,
)
time_keeper.print_results()
if failures:
sys.exit(1)
|
1ff6bbd9f236c05c96686bff331c14cd4a79bd1870fef9c36436d37276d3c90b | # Django documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 27 09:06:53 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't picklable (module imports are okay, they're removed automatically).
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
from os.path import abspath, dirname, join
# Workaround for sphinx-build recursion limit overflow:
# pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
# RuntimeError: maximum recursion depth exceeded while pickling an object
#
# Python's default allowed recursion depth is 1000 but this isn't enough for
# building docs/ref/settings.txt sometimes.
# https://groups.google.com/g/sphinx-dev/c/MtRf64eGtv4/discussion
sys.setrecursionlimit(2000)
# Make sure we get the version of this copy of Django
sys.path.insert(1, dirname(dirname(abspath(__file__))))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(abspath(join(dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "4.5.0"
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"djangodocs",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
"sphinx.ext.autosectionlabel",
]
# AutosectionLabel settings.
# Uses a <page>:<label> schema which doesn't work for duplicate sub-section
# labels, so set max depth.
autosectionlabel_prefix_document = True
autosectionlabel_maxdepth = 2
# Linkcheck settings.
linkcheck_ignore = [
# Special-use addresses and domain names. (RFC 6761/6890)
r"^https?://(?:127\.0\.0\.1|\[::1\])(?::\d+)?/",
r"^https?://(?:[^/\.]+\.)*example\.(?:com|net|org)(?::\d+)?/",
r"^https?://(?:[^/\.]+\.)*(?:example|invalid|localhost|test)(?::\d+)?/",
# Pages that are inaccessible because they require authentication.
r"^https://github\.com/[^/]+/[^/]+/fork",
r"^https://code\.djangoproject\.com/github/login",
r"^https://code\.djangoproject\.com/newticket",
r"^https://(?:code|www)\.djangoproject\.com/admin/",
r"^https://www\.djangoproject\.com/community/add/blogs/",
r"^https://www\.google\.com/webmasters/tools/ping",
r"^https://search\.google\.com/search-console/welcome",
# Fragments used to dynamically switch content or populate fields.
r"^https://web\.libera\.chat/#",
r"^https://github\.com/[^#]+#L\d+-L\d+$",
r"^https://help\.apple\.com/itc/podcasts_connect/#/itc",
# Anchors on certain pages with missing a[name] attributes.
r"^https://tools\.ietf\.org/html/rfc1123\.html#section-",
]
# Spelling check needs an additional module that is not installed by default.
# Add it only if spelling check is requested so docs can be generated without it.
if "spelling" in sys.argv:
extensions.append("sphinxcontrib.spelling")
# Spelling language.
spelling_lang = "en_US"
# Location of word list.
spelling_word_list_filename = "spelling_wordlist"
spelling_warning = True
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = ".txt"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The root toctree document.
root_doc = "contents"
# Disable auto-created table of contents entries for all domain objects (e.g.
# functions, classes, attributes, etc.) in Sphinx 5.2+.
toc_object_entries = False
# General substitutions.
project = "Django"
copyright = "Django Software Foundation and contributors"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "5.0"
# The full version, including alpha/beta/rc tags.
try:
from django import VERSION, get_version
except ImportError:
release = version
else:
def django_release():
pep440ver = get_version()
if VERSION[3:5] == ("alpha", 0) and "dev" not in pep440ver:
return pep440ver + ".dev"
return pep440ver
release = django_release()
# The "development version" of Django
django_next_version = "5.0"
extlinks = {
"bpo": ("https://bugs.python.org/issue?@action=redirect&bpo=%s", "bpo-%s"),
"commit": ("https://github.com/django/django/commit/%s", "%s"),
"cve": ("https://nvd.nist.gov/vuln/detail/CVE-%s", "CVE-%s"),
# A file or directory. GitHub redirects from blob to tree if needed.
"source": ("https://github.com/django/django/blob/main/%s", "%s"),
"ticket": ("https://code.djangoproject.com/ticket/%s", "#%s"),
}
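# With extlinks, doc sources can write short roles instead of full URLs. For
# example, :ticket:`12345` renders as "#12345" and links to
# https://code.djangoproject.com/ticket/12345, and :source:`README.rst` links
# to the file on GitHub.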
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# Location for .po/.mo translation files used when language is set
locale_dirs = ["locale/"]
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = "%B %d, %Y"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build", "_theme", "requirements.txt"]
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "default-role-error"
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "trac"
# Links to Python's docs should reference the most recent version of the 3.x
# branch, which is located at this URL.
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"sphinx": ("https://www.sphinx-doc.org/en/master/", None),
"psycopg": ("https://www.psycopg.org/psycopg3/docs/", None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# The 'versionadded' and 'versionchanged' directives are overridden.
suppress_warnings = ["app.add_directive"]
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = "%b %d, %Y"
# Content template for the index page.
# html_index = ''
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "Djangodoc"
modindex_common_prefix = ["django."]
# Appended to every page
rst_epilog = """
.. |django-users| replace:: :ref:`django-users <django-users-mailing-list>`
.. |django-developers| replace:: :ref:`django-developers <django-developers-mailing-list>`
.. |django-announce| replace:: :ref:`django-announce <django-announce-mailing-list>`
.. |django-updates| replace:: :ref:`django-updates <django-updates-mailing-list>`
""" # NOQA
# -- Options for LaTeX output --------------------------------------------------
# Use XeLaTeX for Unicode support.
latex_engine = "xelatex"
latex_use_xindy = False
# Set font for CJK and fallbacks for unicode characters.
latex_elements = {
"fontpkg": r"""
\setmainfont{Symbola}
""",
"preamble": r"""
\usepackage{newunicodechar}
\usepackage[UTF8]{ctex}
\newunicodechar{π}{\ensuremath{\pi}}
\newunicodechar{≤}{\ensuremath{\le}}
\newunicodechar{≥}{\ensuremath{\ge}}
\newunicodechar{♥}{\ensuremath{\heartsuit}}
\newunicodechar{…}{\ensuremath{\ldots}}
""",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# latex_documents = []
latex_documents = [
(
"contents",
"django.tex",
"Django Documentation",
"Django Software Foundation",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
"ref/django-admin",
"django-admin",
"Utility script for the Django web framework",
["Django Software Foundation"],
1,
)
]
# -- Options for Texinfo output ------------------------------------------------
# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [
(
root_doc,
"django",
"",
"",
"Django",
"Documentation of the Django framework",
"Web development",
False,
)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = "Django Software Foundation"
epub_publisher = "Django Software Foundation"
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = 'Django'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = "djangodocs-epub"
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
epub_cover = ("", "epub-cover.html")
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
# epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
# epub_fix_images = False
# Scale large images.
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# epub_show_urls = 'inline'
# If false, no index is generated.
# epub_use_index = True
|
7fc56287bf916c1e9330e44ae94de9359b3ed607f9816da5257b1fd1fa19790e | import argparse
import ctypes
import faulthandler
import hashlib
import io
import itertools
import logging
import multiprocessing
import os
import pickle
import random
import sys
import textwrap
import unittest
from collections import defaultdict
from contextlib import contextmanager
from importlib import import_module
from io import StringIO
import sqlparse
import django
from django.core.management import call_command
from django.db import connections
from django.test import SimpleTestCase, TestCase
from django.test.utils import NullTimeKeeper, TimeKeeper, iter_test_cases
from django.test.utils import setup_databases as _setup_databases
from django.test.utils import setup_test_environment
from django.test.utils import teardown_databases as _teardown_databases
from django.test.utils import teardown_test_environment
from django.utils.datastructures import OrderedSet
try:
import ipdb as pdb
except ImportError:
import pdb
try:
import tblib.pickling_support
except ImportError:
tblib = None
class DebugSQLTextTestResult(unittest.TextTestResult):
def __init__(self, stream, descriptions, verbosity):
self.logger = logging.getLogger("django.db.backends")
self.logger.setLevel(logging.DEBUG)
self.debug_sql_stream = None
super().__init__(stream, descriptions, verbosity)
def startTest(self, test):
self.debug_sql_stream = StringIO()
self.handler = logging.StreamHandler(self.debug_sql_stream)
self.logger.addHandler(self.handler)
super().startTest(test)
def stopTest(self, test):
super().stopTest(test)
self.logger.removeHandler(self.handler)
if self.showAll:
self.debug_sql_stream.seek(0)
self.stream.write(self.debug_sql_stream.read())
self.stream.writeln(self.separator2)
def addError(self, test, err):
super().addError(test, err)
if self.debug_sql_stream is None:
# Error before tests e.g. in setUpTestData().
sql = ""
else:
self.debug_sql_stream.seek(0)
sql = self.debug_sql_stream.read()
self.errors[-1] = self.errors[-1] + (sql,)
def addFailure(self, test, err):
super().addFailure(test, err)
self.debug_sql_stream.seek(0)
self.failures[-1] = self.failures[-1] + (self.debug_sql_stream.read(),)
def addSubTest(self, test, subtest, err):
super().addSubTest(test, subtest, err)
if err is not None:
self.debug_sql_stream.seek(0)
errors = (
self.failures
if issubclass(err[0], test.failureException)
else self.errors
)
errors[-1] = errors[-1] + (self.debug_sql_stream.read(),)
def printErrorList(self, flavour, errors):
for test, err, sql_debug in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln(err)
self.stream.writeln(self.separator2)
self.stream.writeln(
sqlparse.format(sql_debug, reindent=True, keyword_case="upper")
)
class PDBDebugResult(unittest.TextTestResult):
"""
Custom result class that triggers a PDB session when an error or failure
occurs.
"""
def addError(self, test, err):
super().addError(test, err)
self.debug(err)
def addFailure(self, test, err):
super().addFailure(test, err)
self.debug(err)
def addSubTest(self, test, subtest, err):
if err is not None:
self.debug(err)
super().addSubTest(test, subtest, err)
def debug(self, error):
self._restoreStdout()
self.buffer = False
exc_type, exc_value, traceback = error
print("\nOpening PDB: %r" % exc_value)
pdb.post_mortem(traceback)
class DummyList:
"""
Dummy list class for faking storage of results in unittest.TestResult.
"""
__slots__ = ()
def append(self, item):
pass
class RemoteTestResult(unittest.TestResult):
"""
Extend unittest.TestResult to record events in the child processes so they
can be replayed in the parent process. Events include things like which
tests succeeded or failed.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Fake storage of results to reduce memory usage. These are used by the
# unittest default methods, but here 'events' is used instead.
dummy_list = DummyList()
self.failures = dummy_list
self.errors = dummy_list
self.skipped = dummy_list
self.expectedFailures = dummy_list
self.unexpectedSuccesses = dummy_list
if tblib is not None:
tblib.pickling_support.install()
self.events = []
def __getstate__(self):
# Make this class picklable by removing the file-like buffer
        # attributes. This is safe because they aren't needed once the result
        # has been sent to ParallelTestSuite and unpickled in the parent.
state = self.__dict__.copy()
state.pop("_stdout_buffer", None)
state.pop("_stderr_buffer", None)
state.pop("_original_stdout", None)
state.pop("_original_stderr", None)
return state
@property
def test_index(self):
return self.testsRun - 1
def _confirm_picklable(self, obj):
"""
Confirm that obj can be pickled and unpickled as multiprocessing will
need to pickle the exception in the child process and unpickle it in
        the parent process. If not, let the exception propagate.
"""
pickle.loads(pickle.dumps(obj))
def _print_unpicklable_subtest(self, test, subtest, pickle_exc):
print(
"""
Subtest failed:
test: {}
subtest: {}
Unfortunately, the subtest that failed cannot be pickled, so the parallel
test runner cannot handle it cleanly. Here is the pickling error:
> {}
You should re-run this test with --parallel=1 to reproduce the failure
with a cleaner failure message.
""".format(
test, subtest, pickle_exc
)
)
def check_picklable(self, test, err):
# Ensure that sys.exc_info() tuples are picklable. This displays a
# clear multiprocessing.pool.RemoteTraceback generated in the child
# process instead of a multiprocessing.pool.MaybeEncodingError, making
# the root cause easier to figure out for users who aren't familiar
# with the multiprocessing module. Since we're in a forked process,
# our best chance to communicate with them is to print to stdout.
try:
self._confirm_picklable(err)
except Exception as exc:
original_exc_txt = repr(err[1])
original_exc_txt = textwrap.fill(
original_exc_txt, 75, initial_indent=" ", subsequent_indent=" "
)
pickle_exc_txt = repr(exc)
pickle_exc_txt = textwrap.fill(
pickle_exc_txt, 75, initial_indent=" ", subsequent_indent=" "
)
if tblib is None:
print(
"""
{} failed:
{}
Unfortunately, tracebacks cannot be pickled, making it impossible for the
parallel test runner to handle this exception cleanly.
In order to see the traceback, you should install tblib:
python -m pip install tblib
""".format(
test, original_exc_txt
)
)
else:
print(
"""
{} failed:
{}
Unfortunately, the exception it raised cannot be pickled, making it impossible
for the parallel test runner to handle it cleanly.
Here's the error encountered while trying to pickle the exception:
{}
You should re-run this test with the --parallel=1 option to reproduce the
failure and get a correct traceback.
""".format(
test, original_exc_txt, pickle_exc_txt
)
)
raise
def check_subtest_picklable(self, test, subtest):
try:
self._confirm_picklable(subtest)
except Exception as exc:
self._print_unpicklable_subtest(test, subtest, exc)
raise
def startTestRun(self):
super().startTestRun()
self.events.append(("startTestRun",))
def stopTestRun(self):
super().stopTestRun()
self.events.append(("stopTestRun",))
def startTest(self, test):
super().startTest(test)
self.events.append(("startTest", self.test_index))
def stopTest(self, test):
super().stopTest(test)
self.events.append(("stopTest", self.test_index))
def addError(self, test, err):
self.check_picklable(test, err)
self.events.append(("addError", self.test_index, err))
super().addError(test, err)
def addFailure(self, test, err):
self.check_picklable(test, err)
self.events.append(("addFailure", self.test_index, err))
super().addFailure(test, err)
def addSubTest(self, test, subtest, err):
# Follow Python's implementation of unittest.TestResult.addSubTest() by
# not doing anything when a subtest is successful.
if err is not None:
# Call check_picklable() before check_subtest_picklable() since
# check_picklable() performs the tblib check.
self.check_picklable(test, err)
self.check_subtest_picklable(test, subtest)
self.events.append(("addSubTest", self.test_index, subtest, err))
super().addSubTest(test, subtest, err)
def addSuccess(self, test):
self.events.append(("addSuccess", self.test_index))
super().addSuccess(test)
def addSkip(self, test, reason):
self.events.append(("addSkip", self.test_index, reason))
super().addSkip(test, reason)
def addExpectedFailure(self, test, err):
# If tblib isn't installed, pickling the traceback will always fail.
        # However, we don't want tblib to be required for running the tests
# when they pass or fail as expected. Drop the traceback when an
# expected failure occurs.
if tblib is None:
err = err[0], err[1], None
self.check_picklable(test, err)
self.events.append(("addExpectedFailure", self.test_index, err))
super().addExpectedFailure(test, err)
def addUnexpectedSuccess(self, test):
self.events.append(("addUnexpectedSuccess", self.test_index))
super().addUnexpectedSuccess(test)
def wasSuccessful(self):
"""Tells whether or not this result was a success."""
failure_types = {"addError", "addFailure", "addSubTest", "addUnexpectedSuccess"}
return all(e[0] not in failure_types for e in self.events)
def _exc_info_to_string(self, err, test):
# Make this method no-op. It only powers the default unittest behavior
# for recording errors, but this class pickles errors into 'events'
# instead.
return ""
class RemoteTestRunner:
"""
Run tests and record everything but don't display anything.
The implementation matches the unpythonic coding style of unittest2.
"""
resultclass = RemoteTestResult
def __init__(self, failfast=False, resultclass=None, buffer=False):
self.failfast = failfast
self.buffer = buffer
if resultclass is not None:
self.resultclass = resultclass
def run(self, test):
result = self.resultclass()
unittest.registerResult(result)
result.failfast = self.failfast
result.buffer = self.buffer
test(result)
return result
def get_max_test_processes():
"""
The maximum number of test processes when using the --parallel option.
"""
# The current implementation of the parallel test runner requires
# multiprocessing to start subprocesses with fork() or spawn().
if multiprocessing.get_start_method() not in {"fork", "spawn"}:
return 1
try:
return int(os.environ["DJANGO_TEST_PROCESSES"])
except KeyError:
return multiprocessing.cpu_count()
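# Hedged usage sketch (values are illustrative):
#
#   DJANGO_TEST_PROCESSES=4 ./runtests.py --parallel   # cap at four workers
#   ./runtests.py --parallel                           # one worker per core
#
# With a multiprocessing start method other than fork or spawn, this returns
# 1 and the run is effectively serial.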
def parallel_type(value):
"""Parse value passed to the --parallel option."""
if value == "auto":
return value
try:
return int(value)
except ValueError:
raise argparse.ArgumentTypeError(
f"{value!r} is not an integer or the string 'auto'"
)
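# For example, parallel_type("auto") returns "auto", parallel_type("4")
# returns 4, and parallel_type("four") raises ArgumentTypeError.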
_worker_id = 0
def _init_worker(
counter,
initial_settings=None,
serialized_contents=None,
process_setup=None,
process_setup_args=None,
debug_mode=None,
):
"""
Switch to databases dedicated to this worker.
This helper lives at module-level because of the multiprocessing module's
requirements.
"""
global _worker_id
with counter.get_lock():
counter.value += 1
_worker_id = counter.value
start_method = multiprocessing.get_start_method()
if start_method == "spawn":
if process_setup and callable(process_setup):
if process_setup_args is None:
process_setup_args = ()
process_setup(*process_setup_args)
django.setup()
setup_test_environment(debug=debug_mode)
for alias in connections:
connection = connections[alias]
if start_method == "spawn":
# Restore initial settings in spawned processes.
connection.settings_dict.update(initial_settings[alias])
if value := serialized_contents.get(alias):
connection._test_serialized_contents = value
connection.creation.setup_worker_connection(_worker_id)
def _run_subsuite(args):
"""
Run a suite of tests with a RemoteTestRunner and return a RemoteTestResult.
This helper lives at module-level and its arguments are wrapped in a tuple
because of the multiprocessing module's requirements.
"""
runner_class, subsuite_index, subsuite, failfast, buffer = args
runner = runner_class(failfast=failfast, buffer=buffer)
result = runner.run(subsuite)
return subsuite_index, result.events
def _process_setup_stub(*args):
"""Stub method to simplify run() implementation."""
pass
class ParallelTestSuite(unittest.TestSuite):
"""
Run a series of tests in parallel in several processes.
While the unittest module's documentation implies that orchestrating the
execution of tests is the responsibility of the test runner, in practice,
it appears that TestRunner classes are more concerned with formatting and
displaying test results.
Since there are fewer use cases for customizing TestSuite than TestRunner,
implementing parallelization at the level of the TestSuite improves
interoperability with existing custom test runners. A single instance of a
test runner can still collect results from all tests without being aware
that they have been run in parallel.
"""
# In case someone wants to modify these in a subclass.
init_worker = _init_worker
process_setup = _process_setup_stub
process_setup_args = ()
run_subsuite = _run_subsuite
runner_class = RemoteTestRunner
def __init__(
self, subsuites, processes, failfast=False, debug_mode=False, buffer=False
):
self.subsuites = subsuites
self.processes = processes
self.failfast = failfast
self.debug_mode = debug_mode
self.buffer = buffer
self.initial_settings = None
self.serialized_contents = None
super().__init__()
def run(self, result):
"""
Distribute test cases across workers.
Return an identifier of each test case with its result in order to use
imap_unordered to show results as soon as they're available.
To minimize pickling errors when getting results from workers:
- pass back numeric indexes in self.subsuites instead of tests
- make tracebacks picklable with tblib, if available
Even with tblib, errors may still occur for dynamically created
exception classes which cannot be unpickled.
"""
self.initialize_suite()
counter = multiprocessing.Value(ctypes.c_int, 0)
pool = multiprocessing.Pool(
processes=self.processes,
initializer=self.init_worker.__func__,
initargs=[
counter,
self.initial_settings,
self.serialized_contents,
self.process_setup.__func__,
self.process_setup_args,
self.debug_mode,
],
)
args = [
(self.runner_class, index, subsuite, self.failfast, self.buffer)
for index, subsuite in enumerate(self.subsuites)
]
test_results = pool.imap_unordered(self.run_subsuite.__func__, args)
while True:
if result.shouldStop:
pool.terminate()
break
try:
subsuite_index, events = test_results.next(timeout=0.1)
except multiprocessing.TimeoutError:
continue
except StopIteration:
pool.close()
break
tests = list(self.subsuites[subsuite_index])
for event in events:
event_name = event[0]
handler = getattr(result, event_name, None)
if handler is None:
continue
test = tests[event[1]]
args = event[2:]
handler(test, *args)
pool.join()
return result
def __iter__(self):
return iter(self.subsuites)
def initialize_suite(self):
if multiprocessing.get_start_method() == "spawn":
self.initial_settings = {
alias: connections[alias].settings_dict for alias in connections
}
self.serialized_contents = {
alias: connections[alias]._test_serialized_contents
for alias in connections
if alias in self.serialized_aliases
}
class Shuffler:
"""
This class implements shuffling with a special consistency property.
Consistency means that, for a given seed and key function, if two sets of
items are shuffled, the resulting order will agree on the intersection of
the two sets. For example, if items are removed from an original set, the
shuffled order for the new set will be the shuffled order of the original
set restricted to the smaller set.
"""
# This doesn't need to be cryptographically strong, so use what's fastest.
hash_algorithm = "md5"
@classmethod
def _hash_text(cls, text):
h = hashlib.new(cls.hash_algorithm, usedforsecurity=False)
h.update(text.encode("utf-8"))
return h.hexdigest()
def __init__(self, seed=None):
if seed is None:
# Limit seeds to 10 digits for simpler output.
seed = random.randint(0, 10**10 - 1)
seed_source = "generated"
else:
seed_source = "given"
self.seed = seed
self.seed_source = seed_source
@property
def seed_display(self):
return f"{self.seed!r} ({self.seed_source})"
def _hash_item(self, item, key):
text = "{}{}".format(self.seed, key(item))
return self._hash_text(text)
def shuffle(self, items, key):
"""
Return a new list of the items in a shuffled order.
        The `key` is a function that accepts an item in `items` and returns
        a string that uniquely identifies that item. The
order of the return value is deterministic. It depends on the seed
and key function but not on the original order.
"""
hashes = {}
for item in items:
hashed = self._hash_item(item, key)
if hashed in hashes:
msg = "item {!r} has same hash {!r} as item {!r}".format(
item,
hashed,
hashes[hashed],
)
raise RuntimeError(msg)
hashes[hashed] = item
return [hashes[hashed] for hashed in sorted(hashes)]
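# A minimal sketch of the consistency property described in the docstring
# (illustrative only; the items double as their own keys):
#
#   shuffler = Shuffler(seed=1234)
#   full = shuffler.shuffle(["a", "b", "c", "d"], key=lambda item: item)
#   subset = shuffler.shuffle(["a", "c", "d"], key=lambda item: item)
#   # `subset` equals `full` with "b" removed; the relative order of the
#   # remaining items is unchanged.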
class DiscoverRunner:
"""A Django test runner that uses unittest2 test discovery."""
test_suite = unittest.TestSuite
parallel_test_suite = ParallelTestSuite
test_runner = unittest.TextTestRunner
test_loader = unittest.defaultTestLoader
reorder_by = (TestCase, SimpleTestCase)
def __init__(
self,
pattern=None,
top_level=None,
verbosity=1,
interactive=True,
failfast=False,
keepdb=False,
reverse=False,
debug_mode=False,
debug_sql=False,
parallel=0,
tags=None,
exclude_tags=None,
test_name_patterns=None,
pdb=False,
buffer=False,
enable_faulthandler=True,
timing=False,
shuffle=False,
logger=None,
**kwargs,
):
self.pattern = pattern
self.top_level = top_level
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
self.keepdb = keepdb
self.reverse = reverse
self.debug_mode = debug_mode
self.debug_sql = debug_sql
self.parallel = parallel
self.tags = set(tags or [])
self.exclude_tags = set(exclude_tags or [])
if not faulthandler.is_enabled() and enable_faulthandler:
try:
faulthandler.enable(file=sys.stderr.fileno())
except (AttributeError, io.UnsupportedOperation):
faulthandler.enable(file=sys.__stderr__.fileno())
self.pdb = pdb
if self.pdb and self.parallel > 1:
raise ValueError(
"You cannot use --pdb with parallel tests; pass --parallel=1 to use it."
)
self.buffer = buffer
self.test_name_patterns = None
self.time_keeper = TimeKeeper() if timing else NullTimeKeeper()
if test_name_patterns:
# unittest does not export the _convert_select_pattern function
# that converts command-line arguments to patterns.
self.test_name_patterns = {
pattern if "*" in pattern else "*%s*" % pattern
for pattern in test_name_patterns
}
self.shuffle = shuffle
self._shuffler = None
self.logger = logger
@classmethod
def add_arguments(cls, parser):
parser.add_argument(
"-t",
"--top-level-directory",
dest="top_level",
help="Top level of project for unittest discovery.",
)
parser.add_argument(
"-p",
"--pattern",
default="test*.py",
help="The test matching pattern. Defaults to test*.py.",
)
parser.add_argument(
"--keepdb", action="store_true", help="Preserves the test DB between runs."
)
parser.add_argument(
"--shuffle",
nargs="?",
default=False,
type=int,
metavar="SEED",
help="Shuffles test case order.",
)
parser.add_argument(
"-r",
"--reverse",
action="store_true",
help="Reverses test case order.",
)
parser.add_argument(
"--debug-mode",
action="store_true",
help="Sets settings.DEBUG to True.",
)
parser.add_argument(
"-d",
"--debug-sql",
action="store_true",
help="Prints logged SQL queries on failure.",
)
parser.add_argument(
"--parallel",
nargs="?",
const="auto",
default=0,
type=parallel_type,
metavar="N",
help=(
"Run tests using up to N parallel processes. Use the value "
'"auto" to run one test process for each processor core.'
),
)
parser.add_argument(
"--tag",
action="append",
dest="tags",
help="Run only tests with the specified tag. Can be used multiple times.",
)
parser.add_argument(
"--exclude-tag",
action="append",
dest="exclude_tags",
help="Do not run tests with the specified tag. Can be used multiple times.",
)
parser.add_argument(
"--pdb",
action="store_true",
help="Runs a debugger (pdb, or ipdb if installed) on error or failure.",
)
parser.add_argument(
"-b",
"--buffer",
action="store_true",
help="Discard output from passing tests.",
)
parser.add_argument(
"--no-faulthandler",
action="store_false",
dest="enable_faulthandler",
help="Disables the Python faulthandler module during tests.",
)
parser.add_argument(
"--timing",
action="store_true",
help=("Output timings, including database set up and total run time."),
)
parser.add_argument(
"-k",
action="append",
dest="test_name_patterns",
help=(
"Only run test methods and classes that match the pattern "
"or substring. Can be used multiple times. Same as "
"unittest -k option."
),
)
@property
def shuffle_seed(self):
if self._shuffler is None:
return None
return self._shuffler.seed
def log(self, msg, level=None):
"""
Log the message at the given logging level (the default is INFO).
If a logger isn't set, the message is instead printed to the console,
respecting the configured verbosity. A verbosity of 0 prints no output,
a verbosity of 1 prints INFO and above, and a verbosity of 2 or higher
prints all levels.
"""
if level is None:
level = logging.INFO
if self.logger is None:
if self.verbosity <= 0 or (self.verbosity == 1 and level < logging.INFO):
return
print(msg)
else:
self.logger.log(level, msg)
def setup_test_environment(self, **kwargs):
setup_test_environment(debug=self.debug_mode)
unittest.installHandler()
def setup_shuffler(self):
if self.shuffle is False:
return
shuffler = Shuffler(seed=self.shuffle)
self.log(f"Using shuffle seed: {shuffler.seed_display}")
self._shuffler = shuffler
@contextmanager
def load_with_patterns(self):
original_test_name_patterns = self.test_loader.testNamePatterns
self.test_loader.testNamePatterns = self.test_name_patterns
try:
yield
finally:
# Restore the original patterns.
self.test_loader.testNamePatterns = original_test_name_patterns
def load_tests_for_label(self, label, discover_kwargs):
label_as_path = os.path.abspath(label)
tests = None
# If a module, or "module.ClassName[.method_name]", just run those.
if not os.path.exists(label_as_path):
with self.load_with_patterns():
tests = self.test_loader.loadTestsFromName(label)
if tests.countTestCases():
return tests
# Try discovery if "label" is a package or directory.
is_importable, is_package = try_importing(label)
if is_importable:
if not is_package:
return tests
elif not os.path.isdir(label_as_path):
if os.path.exists(label_as_path):
assert tests is None
raise RuntimeError(
f"One of the test labels is a path to a file: {label!r}, "
f"which is not supported. Use a dotted module name or "
f"path to a directory instead."
)
return tests
kwargs = discover_kwargs.copy()
if os.path.isdir(label_as_path) and not self.top_level:
kwargs["top_level_dir"] = find_top_level(label_as_path)
with self.load_with_patterns():
tests = self.test_loader.discover(start_dir=label, **kwargs)
# Make unittest forget the top-level dir it calculated from this run,
# to support running tests from two different top-levels.
self.test_loader._top_level_dir = None
return tests
def build_suite(self, test_labels=None, **kwargs):
test_labels = test_labels or ["."]
discover_kwargs = {}
if self.pattern is not None:
discover_kwargs["pattern"] = self.pattern
if self.top_level is not None:
discover_kwargs["top_level_dir"] = self.top_level
self.setup_shuffler()
all_tests = []
for label in test_labels:
tests = self.load_tests_for_label(label, discover_kwargs)
all_tests.extend(iter_test_cases(tests))
if self.tags or self.exclude_tags:
if self.tags:
self.log(
"Including test tag(s): %s." % ", ".join(sorted(self.tags)),
level=logging.DEBUG,
)
if self.exclude_tags:
self.log(
"Excluding test tag(s): %s." % ", ".join(sorted(self.exclude_tags)),
level=logging.DEBUG,
)
all_tests = filter_tests_by_tags(all_tests, self.tags, self.exclude_tags)
# Put the failures detected at load time first for quicker feedback.
# _FailedTest objects include things like test modules that couldn't be
# found or that couldn't be loaded due to syntax errors.
test_types = (unittest.loader._FailedTest, *self.reorder_by)
all_tests = list(
reorder_tests(
all_tests,
test_types,
shuffler=self._shuffler,
reverse=self.reverse,
)
)
self.log("Found %d test(s)." % len(all_tests))
suite = self.test_suite(all_tests)
if self.parallel > 1:
subsuites = partition_suite_by_case(suite)
# Since tests are distributed across processes on a per-TestCase
# basis, there's no need for more processes than TestCases.
processes = min(self.parallel, len(subsuites))
# Update also "parallel" because it's used to determine the number
# of test databases.
self.parallel = processes
if processes > 1:
suite = self.parallel_test_suite(
subsuites,
processes,
self.failfast,
self.debug_mode,
self.buffer,
)
return suite
def setup_databases(self, **kwargs):
return _setup_databases(
self.verbosity,
self.interactive,
time_keeper=self.time_keeper,
keepdb=self.keepdb,
debug_sql=self.debug_sql,
parallel=self.parallel,
**kwargs,
)
def get_resultclass(self):
if self.debug_sql:
return DebugSQLTextTestResult
elif self.pdb:
return PDBDebugResult
def get_test_runner_kwargs(self):
return {
"failfast": self.failfast,
"resultclass": self.get_resultclass(),
"verbosity": self.verbosity,
"buffer": self.buffer,
}
def run_checks(self, databases):
# Checks are run after database creation since some checks require
# database access.
call_command("check", verbosity=self.verbosity, databases=databases)
def run_suite(self, suite, **kwargs):
kwargs = self.get_test_runner_kwargs()
runner = self.test_runner(**kwargs)
try:
return runner.run(suite)
finally:
if self._shuffler is not None:
seed_display = self._shuffler.seed_display
self.log(f"Used shuffle seed: {seed_display}")
def teardown_databases(self, old_config, **kwargs):
"""Destroy all the non-mirror databases."""
_teardown_databases(
old_config,
verbosity=self.verbosity,
parallel=self.parallel,
keepdb=self.keepdb,
)
def teardown_test_environment(self, **kwargs):
unittest.removeHandler()
teardown_test_environment()
def suite_result(self, suite, result, **kwargs):
return (
len(result.failures) + len(result.errors) + len(result.unexpectedSuccesses)
)
def _get_databases(self, suite):
databases = {}
for test in iter_test_cases(suite):
test_databases = getattr(test, "databases", None)
if test_databases == "__all__":
test_databases = connections
if test_databases:
serialized_rollback = getattr(test, "serialized_rollback", False)
databases.update(
(alias, serialized_rollback or databases.get(alias, False))
for alias in test_databases
)
return databases
def get_databases(self, suite):
databases = self._get_databases(suite)
unused_databases = [alias for alias in connections if alias not in databases]
if unused_databases:
self.log(
"Skipping setup of unused database(s): %s."
% ", ".join(sorted(unused_databases)),
level=logging.DEBUG,
)
return databases
def run_tests(self, test_labels, **kwargs):
"""
Run the unit tests for all the test labels in the provided list.
Test labels should be dotted Python paths to test modules, test
classes, or test methods.
Return the number of tests that failed.
"""
self.setup_test_environment()
suite = self.build_suite(test_labels)
databases = self.get_databases(suite)
suite.serialized_aliases = set(
alias for alias, serialize in databases.items() if serialize
)
with self.time_keeper.timed("Total database setup"):
old_config = self.setup_databases(
aliases=databases,
serialized_aliases=suite.serialized_aliases,
)
run_failed = False
try:
self.run_checks(databases)
result = self.run_suite(suite)
except Exception:
run_failed = True
raise
finally:
try:
with self.time_keeper.timed("Total database teardown"):
self.teardown_databases(old_config)
self.teardown_test_environment()
except Exception:
# Silence teardown exceptions if an exception was raised during
# runs to avoid shadowing it.
if not run_failed:
raise
self.time_keeper.print_results()
return self.suite_result(suite, result)
def try_importing(label):
"""
Try importing a test label, and return (is_importable, is_package).
Relative labels like "." and ".." are seen as directories.
"""
try:
mod = import_module(label)
except (ImportError, TypeError):
return (False, False)
return (True, hasattr(mod, "__path__"))
def find_top_level(top_level):
# Try to be a bit smarter than unittest about finding the default top-level
# for a given directory path, to avoid breaking relative imports.
# (Unittest's default is to set top-level equal to the path, which means
# relative imports will result in "Attempted relative import in
# non-package.").
# We'd be happy to skip this and require dotted module paths (which don't
# cause this problem) instead of file paths (which do), but in the case of
# a directory in the cwd, which would be equally valid if considered as a
# top-level module or as a directory path, unittest unfortunately prefers
# the latter.
while True:
init_py = os.path.join(top_level, "__init__.py")
if not os.path.exists(init_py):
break
try_next = os.path.dirname(top_level)
if try_next == top_level:
# __init__.py all the way down? give up.
break
top_level = try_next
return top_level
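# For example, given src/pkg/sub/ where pkg/ and sub/ both contain an
# __init__.py but src/ does not, find_top_level("src/pkg/sub") walks up past
# the package directories and returns "src" as the discovery top level.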
def _class_shuffle_key(cls):
return f"{cls.__module__}.{cls.__qualname__}"
def shuffle_tests(tests, shuffler):
"""
Return an iterator over the given tests in a shuffled order, keeping tests
next to other tests of their class.
`tests` should be an iterable of tests.
"""
tests_by_type = {}
for _, class_tests in itertools.groupby(tests, type):
class_tests = list(class_tests)
test_type = type(class_tests[0])
class_tests = shuffler.shuffle(class_tests, key=lambda test: test.id())
tests_by_type[test_type] = class_tests
classes = shuffler.shuffle(tests_by_type, key=_class_shuffle_key)
return itertools.chain(*(tests_by_type[cls] for cls in classes))
def reorder_test_bin(tests, shuffler=None, reverse=False):
"""
Return an iterator that reorders the given tests, keeping tests next to
other tests of their class.
`tests` should be an iterable of tests that supports reversed().
"""
if shuffler is None:
if reverse:
return reversed(tests)
# The function must return an iterator.
return iter(tests)
tests = shuffle_tests(tests, shuffler)
if not reverse:
return tests
# Arguments to reversed() must be reversible.
return reversed(list(tests))
def reorder_tests(tests, classes, reverse=False, shuffler=None):
"""
Reorder an iterable of tests, grouping by the given TestCase classes.
This function also removes any duplicates and reorders so that tests of the
same type are consecutive.
The result is returned as an iterator. `classes` is a sequence of types.
Tests that are instances of `classes[0]` are grouped first, followed by
instances of `classes[1]`, etc. Tests that are not instances of any of the
classes are grouped last.
If `reverse` is True, the tests within each `classes` group are reversed,
but without reversing the order of `classes` itself.
The `shuffler` argument is an optional instance of this module's `Shuffler`
class. If provided, tests will be shuffled within each `classes` group, but
keeping tests with other tests of their TestCase class. Reversing is
applied after shuffling to allow reversing the same random order.
"""
# Each bin maps TestCase class to OrderedSet of tests. This permits tests
# to be grouped by TestCase class even if provided non-consecutively.
bins = [defaultdict(OrderedSet) for i in range(len(classes) + 1)]
*class_bins, last_bin = bins
for test in tests:
for test_bin, test_class in zip(class_bins, classes):
if isinstance(test, test_class):
break
else:
test_bin = last_bin
test_bin[type(test)].add(test)
for test_bin in bins:
# Call list() since reorder_test_bin()'s input must support reversed().
tests = list(itertools.chain.from_iterable(test_bin.values()))
yield from reorder_test_bin(tests, shuffler=shuffler, reverse=reverse)
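# Hedged sketch with hypothetical test instances (tc* are TestCase tests,
# st* are SimpleTestCase tests):
#
#   mixed = [st1, tc1, st2, tc2]
#   list(reorder_tests(mixed, classes=(TestCase,)))
#   # -> [tc1, tc2, st1, st2]: TestCase instances fill the first bin and
#   # everything else falls through to the trailing bin.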
def partition_suite_by_case(suite):
"""Partition a test suite by test case, preserving the order of tests."""
suite_class = type(suite)
all_tests = iter_test_cases(suite)
return [suite_class(tests) for _, tests in itertools.groupby(all_tests, type)]
def test_match_tags(test, tags, exclude_tags):
if isinstance(test, unittest.loader._FailedTest):
        # Tests that couldn't load always match so that, e.g., a module with a
        # syntax error can't make the run falsely pass by being skipped.
return True
test_tags = set(getattr(test, "tags", []))
test_fn_name = getattr(test, "_testMethodName", str(test))
if hasattr(test, test_fn_name):
test_fn = getattr(test, test_fn_name)
test_fn_tags = list(getattr(test_fn, "tags", []))
test_tags = test_tags.union(test_fn_tags)
if tags and test_tags.isdisjoint(tags):
return False
return test_tags.isdisjoint(exclude_tags)
def filter_tests_by_tags(tests, tags, exclude_tags):
"""Return the matching tests as an iterator."""
return (test for test in tests if test_match_tags(test, tags, exclude_tags))
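# Hedged sketch of how tags reach this filter (uses django.test.tag; the
# test class below is hypothetical):
#
#   from django.test import TestCase, tag
#
#   @tag("slow")
#   class MigrationTests(TestCase):
#       @tag("postgres")
#       def test_vacuum(self):
#           ...
#
#   # With tags={"slow"}, the whole class matches; adding
#   # exclude_tags={"postgres"} drops test_vacuum, since class-level and
#   # method-level tags are unioned for each test.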
|
eb9935d1b5d6e7b3ac617a36c9d3dba112de44c4bd7f9c6697d12f7a66ff8fd7 | import json
import mimetypes
import os
import sys
from copy import copy
from functools import partial
from http import HTTPStatus
from importlib import import_module
from io import BytesIO, IOBase
from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit
from asgiref.sync import sync_to_async
from django.conf import settings
from django.core.handlers.asgi import ASGIRequest
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import LimitedStream, WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import got_request_exception, request_finished, request_started
from django.db import close_old_connections
from django.http import HttpHeaders, HttpRequest, QueryDict, SimpleCookie
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.regex_helper import _lazy_re_compile
__all__ = (
"AsyncClient",
"AsyncRequestFactory",
"Client",
"RedirectCycleError",
"RequestFactory",
"encode_file",
"encode_multipart",
)
BOUNDARY = "BoUnDaRyStRiNg"
MULTIPART_CONTENT = "multipart/form-data; boundary=%s" % BOUNDARY
CONTENT_TYPE_RE = _lazy_re_compile(r".*; charset=([\w-]+);?")
# Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8
JSON_CONTENT_TYPE_RE = _lazy_re_compile(r"^application\/(.+\+)?json")
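# For instance, this matches "application/json" and structured suffixes such
# as "application/vnd.api+json", but not "text/json".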
class RedirectCycleError(Exception):
"""The test client has been asked to follow a redirect loop."""
def __init__(self, message, last_response):
super().__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload(IOBase):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be sought and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in real life.
"""
def __init__(self, initial_bytes=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if initial_bytes is not None:
self.write(initial_bytes)
def __len__(self):
return self.__len
def read(self, size=-1, /):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if size == -1 or size is None:
size = self.__len
assert (
self.__len >= size
), "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(size)
self.__len -= len(content)
return content
def readline(self, size=-1, /):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if size == -1 or size is None:
size = self.__len
assert (
self.__len >= size
), "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.readline(size)
self.__len -= len(content)
return content
def write(self, b, /):
if self.read_started:
raise ValueError("Unable to write a payload after it's been read")
content = force_bytes(b)
self.__content.write(content)
self.__len += len(content)
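# A minimal sketch of FakePayload's read-once behavior (illustrative only):
#
#   payload = FakePayload(b"name=test")
#   payload.read(4)   # -> b"name"
#   payload.read()    # -> b"=test"; asking for more than remains would
#                     # trip the assertion above.
#   # payload.write(b"x") would now raise ValueError: it has been read.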
def closing_iterator_wrapper(iterable, close):
try:
yield from iterable
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most web servers by removing the content of
    responses to HEAD requests and of 1xx, 204, and 304 responses, per
    RFC 9112 Section 6.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b""
if request.method == "HEAD":
if response.streaming:
response.streaming_content = []
else:
response.content = b""
return response
class ClientHandler(BaseHandler):
"""
An HTTP Handler that can be used for testing purposes. Use the WSGI
interface to compose requests, but return the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
        # Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# Emulate a WSGI server by calling the close method on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close
)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
class AsyncClientHandler(BaseHandler):
"""An async version of ClientHandler."""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
async def __call__(self, scope):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware(is_async=True)
# Extract body file from the scope, if provided.
if "_body_file" in scope:
body_file = scope.pop("_body_file")
else:
body_file = FakePayload("")
request_started.disconnect(close_old_connections)
await sync_to_async(request_started.send, thread_sensitive=False)(
sender=self.__class__, scope=scope
)
request_started.connect(close_old_connections)
# Wrap FakePayload body_file to allow large read() in test environment.
request = ASGIRequest(scope, LimitedStream(body_file, len(body_file)))
# Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably required
# for backwards compatibility with external tests against admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = await self.get_response_async(request)
# Simulate behaviors of most web servers.
conditional_content_removal(request, response)
        # Attach the originating ASGI request to the response so that it can
        # be retrieved later.
response.asgi_request = request
# Emulate a server by calling the close method on completion.
if response.streaming:
response.streaming_content = await sync_to_async(
closing_iterator_wrapper, thread_sensitive=False
)(
response.streaming_content,
response.close,
)
else:
request_finished.disconnect(close_old_connections)
# Will fire request_finished.
await sync_to_async(response.close, thread_sensitive=False)()
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Store templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault("templates", []).append(template)
if "context" not in store:
store["context"] = ContextList()
store["context"].append(copy(context))
def encode_multipart(boundary, data):
"""
Encode multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for key, value in data.items():
if value is None:
raise TypeError(
"Cannot encode None for key '%s' as POST data. Did you mean "
"to pass an empty string or omit the value?" % key
)
elif is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, str) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(
to_bytes(val)
for val in [
"--%s" % boundary,
'Content-Disposition: form-data; name="%s"' % key,
"",
item,
]
)
else:
lines.extend(
to_bytes(val)
for val in [
"--%s" % boundary,
'Content-Disposition: form-data; name="%s"' % key,
"",
value,
]
)
lines.extend(
[
to_bytes("--%s--" % boundary),
b"",
]
)
return b"\r\n".join(lines)
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# file.name might not be a string. For example, it's an int for
# tempfile.TemporaryFile().
file_has_string_name = hasattr(file, "name") and isinstance(file.name, str)
filename = os.path.basename(file.name) if file_has_string_name else ""
if hasattr(file, "content_type"):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = "application/octet-stream"
filename = filename or key
return [
to_bytes("--%s" % boundary),
to_bytes(
'Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)
),
to_bytes("Content-Type: %s" % content_type),
b"",
to_bytes(file.read()),
]
class RequestFactory:
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, *, json_encoder=DjangoJSONEncoder, headers=None, **defaults):
self.json_encoder = json_encoder
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
if headers:
self.defaults.update(HttpHeaders.to_wsgi_names(headers))
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See https://www.python.org/dev/peps/pep-3333/#environ-variables
return {
"HTTP_COOKIE": "; ".join(
sorted(
"%s=%s" % (morsel.key, morsel.coded_value)
for morsel in self.cookies.values()
)
),
"PATH_INFO": "/",
"REMOTE_ADDR": "127.0.0.1",
"REQUEST_METHOD": "GET",
"SCRIPT_NAME": "",
"SERVER_NAME": "testserver",
"SERVER_PORT": "80",
"SERVER_PROTOCOL": "HTTP/1.1",
"wsgi.version": (1, 0),
"wsgi.url_scheme": "http",
"wsgi.input": FakePayload(b""),
"wsgi.errors": self.errors,
"wsgi.multiprocess": True,
"wsgi.multithread": False,
"wsgi.run_once": False,
**self.defaults,
**request,
}
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match[1]
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _encode_json(self, data, content_type):
"""
Return encoded JSON if data is a dict, list, or tuple and content_type
is application/json.
"""
should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(
data, (dict, list, tuple)
)
return json.dumps(data, cls=self.json_encoder) if should_encode else data
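    # Sketch of the JSON shortcut above: dict/list/tuple payloads posted
    # with an application/json content type are serialized automatically,
    # while strings pass through unchanged.
    #
    #     rf = RequestFactory()
    #     request = rf.post(
    #         "/submit/", {"a": [1, 2]}, content_type="application/json"
    #     )
    #     # request.body == b'{"a": [1, 2]}'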
def _get_path(self, parsed):
path = parsed.path
# If there are parameters, add them
if parsed.params:
path += ";" + parsed.params
path = unquote_to_bytes(path)
# Replace the behavior where non-ASCII values in the WSGI environ are
# arbitrarily decoded with ISO-8859-1.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode("iso-8859-1")
def get(self, path, data=None, secure=False, *, headers=None, **extra):
"""Construct a GET request."""
data = {} if data is None else data
return self.generic(
"GET",
path,
secure=secure,
headers=headers,
**{
"QUERY_STRING": urlencode(data, doseq=True),
**extra,
},
)
def post(
self,
path,
data=None,
content_type=MULTIPART_CONTENT,
secure=False,
*,
headers=None,
**extra,
):
"""Construct a POST request."""
data = self._encode_json({} if data is None else data, content_type)
post_data = self._encode_data(data, content_type)
return self.generic(
"POST",
path,
post_data,
content_type,
secure=secure,
headers=headers,
**extra,
)
def head(self, path, data=None, secure=False, *, headers=None, **extra):
"""Construct a HEAD request."""
data = {} if data is None else data
return self.generic(
"HEAD",
path,
secure=secure,
headers=headers,
**{
"QUERY_STRING": urlencode(data, doseq=True),
**extra,
},
)
def trace(self, path, secure=False, *, headers=None, **extra):
"""Construct a TRACE request."""
return self.generic("TRACE", path, secure=secure, headers=headers, **extra)
def options(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"Construct an OPTIONS request."
return self.generic(
"OPTIONS", path, data, content_type, secure=secure, headers=headers, **extra
)
def put(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct a PUT request."""
data = self._encode_json(data, content_type)
return self.generic(
"PUT", path, data, content_type, secure=secure, headers=headers, **extra
)
def patch(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct a PATCH request."""
data = self._encode_json(data, content_type)
return self.generic(
"PATCH", path, data, content_type, secure=secure, headers=headers, **extra
)
def delete(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct a DELETE request."""
data = self._encode_json(data, content_type)
return self.generic(
"DELETE", path, data, content_type, secure=secure, headers=headers, **extra
)
def generic(
self,
method,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
"PATH_INFO": self._get_path(parsed),
"REQUEST_METHOD": method,
"SERVER_PORT": "443" if secure else "80",
"wsgi.url_scheme": "https" if secure else "http",
}
if data:
r.update(
{
"CONTENT_LENGTH": str(len(data)),
"CONTENT_TYPE": content_type,
"wsgi.input": FakePayload(data),
}
)
if headers:
extra.update(HttpHeaders.to_wsgi_names(headers))
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get("QUERY_STRING"):
# WSGI requires latin-1 encoded strings. See get_path_info().
query_string = parsed[4].encode().decode("iso-8859-1")
r["QUERY_STRING"] = query_string
return self.request(**r)
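# A fuller RequestFactory sketch ("my_view" is hypothetical):
#
#     rf = RequestFactory(headers={"accept": "application/json"})
#     request = rf.get("/hello/", {"q": "test"}, secure=True)
#     # request.GET["q"] == "test"
#     # request.scheme == "https"
#     # request.headers["Accept"] == "application/json"
#     response = my_view(request)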
class AsyncRequestFactory(RequestFactory):
"""
Class that lets you create mock ASGI-like Request objects for use in
testing. Usage:
rf = AsyncRequestFactory()
get_request = await rf.get('/hello/')
post_request = await rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
    including synchronous ones. The reason we have a separate class here is:
    a) this builds ASGIRequest instances rather than WSGIRequest ones, and
    b) AsyncTestClient can subclass it.
"""
def _base_scope(self, **request):
"""The base scope for a request."""
# This is a minimal valid ASGI scope, plus:
# - headers['cookie'] for cookie support,
# - 'client' often useful, see #8551.
scope = {
"asgi": {"version": "3.0"},
"type": "http",
"http_version": "1.1",
"client": ["127.0.0.1", 0],
"server": ("testserver", "80"),
"scheme": "http",
"method": "GET",
"headers": [],
**self.defaults,
**request,
}
scope["headers"].append(
(
b"cookie",
b"; ".join(
sorted(
("%s=%s" % (morsel.key, morsel.coded_value)).encode("ascii")
for morsel in self.cookies.values()
)
),
)
)
return scope
def request(self, **request):
"""Construct a generic request object."""
# This is synchronous, which means all methods on this class are.
# AsyncClient, however, has an async request function, which makes all
# its methods async.
if "_body_file" in request:
body_file = request.pop("_body_file")
else:
body_file = FakePayload("")
# Wrap FakePayload body_file to allow large read() in test environment.
return ASGIRequest(
self._base_scope(**request), LimitedStream(body_file, len(body_file))
)
def generic(
self,
method,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy.
data = force_bytes(data, settings.DEFAULT_CHARSET)
s = {
"method": method,
"path": self._get_path(parsed),
"server": ("127.0.0.1", "443" if secure else "80"),
"scheme": "https" if secure else "http",
"headers": [(b"host", b"testserver")],
}
if data:
s["headers"].extend(
[
(b"content-length", str(len(data)).encode("ascii")),
(b"content-type", content_type.encode("ascii")),
]
)
s["_body_file"] = FakePayload(data)
follow = extra.pop("follow", None)
if follow is not None:
s["follow"] = follow
if query_string := extra.pop("QUERY_STRING", None):
s["query_string"] = query_string
if headers:
extra.update(HttpHeaders.to_asgi_names(headers))
s["headers"] += [
(key.lower().encode("ascii"), value.encode("latin1"))
for key, value in extra.items()
]
# If QUERY_STRING is absent or empty, we want to extract it from the
# URL.
if not s.get("query_string"):
s["query_string"] = parsed[4]
return self.request(**s)
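# Usage sketch for AsyncRequestFactory ("my_async_view" is hypothetical;
# the await belongs inside an async test method):
#
#     arf = AsyncRequestFactory()
#     request = arf.get("/hello/")  # an ASGIRequest, not a WSGIRequest
#     response = await my_async_view(request)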
class ClientMixin:
"""
Mixin with common methods between Client and AsyncClient.
"""
def store_exc_info(self, **kwargs):
"""Store exceptions when they are generated by a view."""
self.exc_info = sys.exc_info()
def check_exception(self, response):
"""
Look for a signaled exception, clear the current context exception
data, re-raise the signaled exception, and clear the signaled exception
from the local cache.
"""
response.exc_info = self.exc_info
if self.exc_info:
_, exc_value, _ = self.exc_info
self.exc_info = None
if self.raise_request_exception:
raise exc_value
@property
def session(self):
"""Return the current session variables."""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
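    # Sketch: pre-populating session data through this property (client is a
    # Client instance). Each access builds a fresh SessionStore, so mutate
    # through a local variable and save explicitly:
    #
    #     session = client.session
    #     session["somekey"] = "test"
    #     session.save()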
def login(self, **credentials):
"""
Set the Factory to appear as if it has successfully logged into a site.
Return True if login is possible or False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
return False
def force_login(self, user, backend=None):
def get_backend():
from django.contrib.auth import load_backend
for backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
if hasattr(backend, "get_user"):
return backend_path
if backend is None:
backend = get_backend()
user.backend = backend
self._login(user, backend)
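    # Sketch: force_login() skips authentication entirely, which is faster
    # than login() when password handling itself isn't under test (assumes
    # ``from django.contrib.auth.models import User``; "fred" is a
    # hypothetical user):
    #
    #     user = User.objects.get(username="fred")
    #     client.force_login(user)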
def _login(self, user, backend=None):
from django.contrib.auth import login
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
"max-age": None,
"path": "/",
"domain": settings.SESSION_COOKIE_DOMAIN,
"secure": settings.SESSION_COOKIE_SECURE or None,
"expires": None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""Log out the user by removing the cookies and session object."""
from django.contrib.auth import get_user, logout
request = HttpRequest()
if self.session:
request.session = self.session
request.user = get_user(request)
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if not hasattr(response, "_json"):
if not JSON_CONTENT_TYPE_RE.match(response.get("Content-Type")):
raise ValueError(
'Content-Type header is "%s", not "application/json"'
% response.get("Content-Type")
)
response._json = json.loads(
response.content.decode(response.charset), **extra
)
return response._json
class Client(ClientMixin, RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(
self,
enforce_csrf_checks=False,
raise_request_exception=True,
*,
headers=None,
**defaults,
):
super().__init__(headers=headers, **defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
self.headers = None
def request(self, **request):
"""
Make a generic request. Compose the environment dictionary and pass
to the handler, return the result of the handler. Assume defaults for
the query environment, which can be overridden using the arguments to
the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = self.handler(environ)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
urlconf = getattr(response.wsgi_request, "urlconf", None)
response.resolver_match = SimpleLazyObject(
lambda: resolve(request["PATH_INFO"], urlconf=urlconf),
)
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
def get(
self,
path,
data=None,
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using GET."""
self.extra = extra
self.headers = headers
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
if follow:
response = self._handle_redirects(
response, data=data, headers=headers, **extra
)
return response
def post(
self,
path,
data=None,
content_type=MULTIPART_CONTENT,
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using POST."""
self.extra = extra
self.headers = headers
response = super().post(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def head(
self,
path,
data=None,
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using HEAD."""
self.extra = extra
self.headers = headers
response = super().head(
path, data=data, secure=secure, headers=headers, **extra
)
if follow:
response = self._handle_redirects(
response, data=data, headers=headers, **extra
)
return response
def options(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using OPTIONS."""
self.extra = extra
self.headers = headers
response = super().options(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def put(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a resource to the server using PUT."""
self.extra = extra
self.headers = headers
response = super().put(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def patch(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a resource to the server using PATCH."""
self.extra = extra
self.headers = headers
response = super().patch(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def delete(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a DELETE request to the server."""
self.extra = extra
self.headers = headers
response = super().delete(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def trace(
self,
path,
data="",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a TRACE request to the server."""
self.extra = extra
self.headers = headers
response = super().trace(
path, data=data, secure=secure, headers=headers, **extra
)
if follow:
response = self._handle_redirects(
response, data=data, headers=headers, **extra
)
return response
def _handle_redirects(
self,
response,
data="",
content_type="",
headers=None,
**extra,
):
"""
Follow any redirects by requesting responses from the server using GET.
"""
response.redirect_chain = []
redirect_status_codes = (
HTTPStatus.MOVED_PERMANENTLY,
HTTPStatus.FOUND,
HTTPStatus.SEE_OTHER,
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
)
while response.status_code in redirect_status_codes:
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra["wsgi.url_scheme"] = url.scheme
if url.hostname:
extra["SERVER_NAME"] = url.hostname
if url.port:
extra["SERVER_PORT"] = str(url.port)
path = url.path
# RFC 3986 Section 6.2.3: Empty path should be normalized to "/".
if not path and url.netloc:
path = "/"
# Prepend the request path to handle relative path redirects
if not path.startswith("/"):
path = urljoin(response.request["PATH_INFO"], path)
if response.status_code in (
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
):
# Preserve request method and query string (if needed)
# post-redirect for 307/308 responses.
request_method = response.request["REQUEST_METHOD"].lower()
if request_method not in ("get", "head"):
extra["QUERY_STRING"] = url.query
request_method = getattr(self, request_method)
else:
request_method = self.get
data = QueryDict(url.query)
content_type = None
response = request_method(
path,
data=data,
content_type=content_type,
follow=False,
headers=headers,
**extra,
)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError(
"Redirect loop detected.", last_response=response
)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
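# Sketch of redirect following with the machinery above (URLs are
# hypothetical):
#
#     client = Client()
#     response = client.get("/old-path/", follow=True)
#     # response.redirect_chain == [("/new-path/", 301)]
#     # response.status_code is the final (post-redirect) status.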
class AsyncClient(ClientMixin, AsyncRequestFactory):
"""
An async version of Client that creates ASGIRequests and calls through an
async request path.
Does not currently support "follow" on its methods.
"""
def __init__(
self,
enforce_csrf_checks=False,
raise_request_exception=True,
*,
headers=None,
**defaults,
):
super().__init__(headers=headers, **defaults)
self.handler = AsyncClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
self.headers = None
async def request(self, **request):
"""
Make a generic request. Compose the scope dictionary and pass to the
handler, return the result of the handler. Assume defaults for the
query environment, which can be overridden using the arguments to the
request.
"""
if "follow" in request:
raise NotImplementedError(
"AsyncClient request methods do not accept the follow parameter."
)
scope = self._base_scope(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = await self.handler(scope)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
urlconf = getattr(response.asgi_request, "urlconf", None)
response.resolver_match = SimpleLazyObject(
lambda: resolve(request["path"], urlconf=urlconf),
)
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
|
f65a9051e876c690d6f1f2912653c284c774df3a6fe7efb6ac65f39965a4073d | import difflib
import json
import logging
import posixpath
import sys
import threading
import unittest
import warnings
from collections import Counter
from contextlib import contextmanager
from copy import copy, deepcopy
from difflib import get_close_matches
from functools import wraps
from unittest.suite import _DebugResult
from unittest.util import safe_repr
from urllib.parse import (
parse_qsl,
unquote,
urlencode,
urljoin,
urlparse,
urlsplit,
urlunparse,
)
from urllib.request import url2pathname
from asgiref.sync import async_to_sync, iscoroutinefunction
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.files import locks
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIRequestHandler
from django.core.signals import setting_changed
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.http.request import split_domain_port, validate_host
from django.test.client import AsyncClient, Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import template_rendered
from django.test.utils import (
CaptureQueriesContext,
ContextList,
compare_xml,
modify_settings,
override_settings,
)
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.functional import classproperty
from django.views.static import serve
logger = logging.getLogger("django.test")
__all__ = (
"TestCase",
"TransactionTestCase",
"SimpleTestCase",
"skipIfDBFeature",
"skipUnlessDBFeature",
)
def to_list(value):
"""Put value into a list if it's not already one."""
if not isinstance(value, list):
value = [value]
return value
def assert_and_parse_html(self, html, user_msg, msg):
try:
dom = parse_html(html)
except HTMLParseError as e:
standardMsg = "%s\n%s" % (msg, e)
self.fail(self._formatMessage(user_msg, standardMsg))
return dom
class _AssertNumQueriesContext(CaptureQueriesContext):
def __init__(self, test_case, num, connection):
self.test_case = test_case
self.num = num
super().__init__(connection)
def __exit__(self, exc_type, exc_value, traceback):
super().__exit__(exc_type, exc_value, traceback)
if exc_type is not None:
return
executed = len(self)
self.test_case.assertEqual(
executed,
self.num,
"%d queries executed, %d expected\nCaptured queries were:\n%s"
% (
executed,
self.num,
"\n".join(
"%d. %s" % (i, query["sql"])
for i, query in enumerate(self.captured_queries, start=1)
),
),
)
class _AssertTemplateUsedContext:
def __init__(self, test_case, template_name, msg_prefix="", count=None):
self.test_case = test_case
self.template_name = template_name
self.msg_prefix = msg_prefix
self.count = count
self.rendered_templates = []
self.rendered_template_names = []
self.context = ContextList()
def on_template_render(self, sender, signal, template, context, **kwargs):
self.rendered_templates.append(template)
self.rendered_template_names.append(template.name)
self.context.append(copy(context))
def test(self):
self.test_case._assert_template_used(
self.template_name,
self.rendered_template_names,
self.msg_prefix,
self.count,
)
def __enter__(self):
template_rendered.connect(self.on_template_render)
return self
def __exit__(self, exc_type, exc_value, traceback):
template_rendered.disconnect(self.on_template_render)
if exc_type is not None:
return
self.test()
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
def test(self):
self.test_case.assertFalse(
self.template_name in self.rendered_template_names,
f"{self.msg_prefix}Template '{self.template_name}' was used "
f"unexpectedly in rendering the response",
)
class DatabaseOperationForbidden(AssertionError):
pass
class _DatabaseFailure:
def __init__(self, wrapped, message):
self.wrapped = wrapped
self.message = message
def __call__(self):
raise DatabaseOperationForbidden(self.message)
class SimpleTestCase(unittest.TestCase):
# The class we'll use for the test client self.client.
# Can be overridden in derived classes.
client_class = Client
async_client_class = AsyncClient
_overridden_settings = None
_modified_settings = None
databases = set()
_disallowed_database_msg = (
"Database %(operation)s to %(alias)r are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to ensure "
"proper test isolation or add %(alias)r to %(test)s.databases to silence "
"this failure."
)
_disallowed_connection_methods = [
("connect", "connections"),
("temporary_connection", "connections"),
("cursor", "queries"),
("chunked_cursor", "queries"),
]
@classmethod
def setUpClass(cls):
super().setUpClass()
if cls._overridden_settings:
cls._cls_overridden_context = override_settings(**cls._overridden_settings)
cls._cls_overridden_context.enable()
cls.addClassCleanup(cls._cls_overridden_context.disable)
if cls._modified_settings:
cls._cls_modified_context = modify_settings(cls._modified_settings)
cls._cls_modified_context.enable()
cls.addClassCleanup(cls._cls_modified_context.disable)
cls._add_databases_failures()
cls.addClassCleanup(cls._remove_databases_failures)
@classmethod
def _validate_databases(cls):
if cls.databases == "__all__":
return frozenset(connections)
for alias in cls.databases:
if alias not in connections:
message = (
"%s.%s.databases refers to %r which is not defined in "
"settings.DATABASES."
% (
cls.__module__,
cls.__qualname__,
alias,
)
)
close_matches = get_close_matches(alias, list(connections))
if close_matches:
message += " Did you mean %r?" % close_matches[0]
raise ImproperlyConfigured(message)
return frozenset(cls.databases)
@classmethod
def _add_databases_failures(cls):
cls.databases = cls._validate_databases()
for alias in connections:
if alias in cls.databases:
continue
connection = connections[alias]
for name, operation in cls._disallowed_connection_methods:
message = cls._disallowed_database_msg % {
"test": "%s.%s" % (cls.__module__, cls.__qualname__),
"alias": alias,
"operation": operation,
}
method = getattr(connection, name)
setattr(connection, name, _DatabaseFailure(method, message))
@classmethod
def _remove_databases_failures(cls):
for alias in connections:
if alias in cls.databases:
continue
connection = connections[alias]
for name, _ in cls._disallowed_connection_methods:
method = getattr(connection, name)
setattr(connection, name, method.wrapped)
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
self._setup_and_call(result)
def debug(self):
"""Perform the same as __call__(), without catching the exception."""
debug_result = _DebugResult()
self._setup_and_call(debug_result, debug=True)
def _setup_and_call(self, result, debug=False):
"""
Perform the following in order: pre-setup, run test, post-teardown,
skipping pre/post hooks if test is set to be skipped.
If debug=True, reraise any errors in setup and use super().debug()
instead of __call__() to run the test.
"""
testMethod = getattr(self, self._testMethodName)
skipped = getattr(self.__class__, "__unittest_skip__", False) or getattr(
testMethod, "__unittest_skip__", False
)
# Convert async test methods.
if iscoroutinefunction(testMethod):
setattr(self, self._testMethodName, async_to_sync(testMethod))
if not skipped:
try:
self._pre_setup()
except Exception:
if debug:
raise
result.addError(self, sys.exc_info())
return
if debug:
super().debug()
else:
super().__call__(result)
if not skipped:
try:
self._post_teardown()
except Exception:
if debug:
raise
result.addError(self, sys.exc_info())
return
def _pre_setup(self):
"""
Perform pre-test setup:
* Create a test client.
* Clear the mail test outbox.
"""
self.client = self.client_class()
self.async_client = self.async_client_class()
mail.outbox = []
def _post_teardown(self):
"""Perform post-test things."""
pass
def settings(self, **kwargs):
"""
A context manager that temporarily sets a setting and reverts to the
original value when exiting the context.
"""
return override_settings(**kwargs)
def modify_settings(self, **kwargs):
"""
        A context manager that temporarily applies changes to a list setting
        and reverts back to the original value when exiting the context.
"""
return modify_settings(**kwargs)
def assertRedirects(
self,
response,
expected_url,
status_code=302,
target_status_code=200,
msg_prefix="",
fetch_redirect_response=True,
):
"""
Assert that a response redirected to a specific URL and that the
redirect URL can be loaded.
Won't work for external links since it uses the test client to do a
request (use fetch_redirect_response=False to check such links without
fetching them).
"""
if msg_prefix:
msg_prefix += ": "
if hasattr(response, "redirect_chain"):
# The request was a followed redirect
self.assertTrue(
response.redirect_chain,
msg_prefix
+ (
"Response didn't redirect as expected: Response code was %d "
"(expected %d)"
)
% (response.status_code, status_code),
)
self.assertEqual(
response.redirect_chain[0][1],
status_code,
msg_prefix
+ (
"Initial response didn't redirect as expected: Response code was "
"%d (expected %d)"
)
% (response.redirect_chain[0][1], status_code),
)
url, status_code = response.redirect_chain[-1]
self.assertEqual(
response.status_code,
target_status_code,
msg_prefix
+ (
"Response didn't redirect as expected: Final Response code was %d "
"(expected %d)"
)
% (response.status_code, target_status_code),
)
else:
# Not a followed redirect
self.assertEqual(
response.status_code,
status_code,
msg_prefix
+ (
"Response didn't redirect as expected: Response code was %d "
"(expected %d)"
)
% (response.status_code, status_code),
)
url = response.url
scheme, netloc, path, query, fragment = urlsplit(url)
# Prepend the request path to handle relative path redirects.
if not path.startswith("/"):
url = urljoin(response.request["PATH_INFO"], url)
path = urljoin(response.request["PATH_INFO"], path)
if fetch_redirect_response:
# netloc might be empty, or in cases where Django tests the
# HTTP scheme, the convention is for netloc to be 'testserver'.
# Trust both as "internal" URLs here.
domain, port = split_domain_port(netloc)
if domain and not validate_host(domain, settings.ALLOWED_HOSTS):
raise ValueError(
"The test client is unable to fetch remote URLs (got %s). "
"If the host is served by Django, add '%s' to ALLOWED_HOSTS. "
"Otherwise, use "
"assertRedirects(..., fetch_redirect_response=False)."
% (url, domain)
)
# Get the redirection page, using the same client that was used
# to obtain the original response.
extra = response.client.extra or {}
headers = response.client.headers or {}
redirect_response = response.client.get(
path,
QueryDict(query),
secure=(scheme == "https"),
headers=headers,
**extra,
)
self.assertEqual(
redirect_response.status_code,
target_status_code,
msg_prefix
+ (
"Couldn't retrieve redirection page '%s': response code was %d "
"(expected %d)"
)
% (path, redirect_response.status_code, target_status_code),
)
self.assertURLEqual(
url,
expected_url,
msg_prefix
+ "Response redirected to '%s', expected '%s'" % (url, expected_url),
)
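    # Sketch (URLs are hypothetical):
    #
    #     response = self.client.get("/old/")
    #     self.assertRedirects(response, "/new/", status_code=301)
    #
    # For targets not served by the test client, skip fetching them:
    #
    #     external_response = self.client.get("/to-external/")
    #     self.assertRedirects(
    #         external_response,
    #         "https://example.com/",
    #         fetch_redirect_response=False,
    #     )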
def assertURLEqual(self, url1, url2, msg_prefix=""):
"""
Assert that two URLs are the same, ignoring the order of query string
parameters except for parameters with the same name.
For example, /path/?x=1&y=2 is equal to /path/?y=2&x=1, but
/path/?a=1&a=2 isn't equal to /path/?a=2&a=1.
"""
def normalize(url):
"""Sort the URL's query string parameters."""
url = str(url) # Coerce reverse_lazy() URLs.
scheme, netloc, path, params, query, fragment = urlparse(url)
query_parts = sorted(parse_qsl(query))
return urlunparse(
(scheme, netloc, path, params, urlencode(query_parts), fragment)
)
self.assertEqual(
normalize(url1),
normalize(url2),
msg_prefix + "Expected '%s' to equal '%s'." % (url1, url2),
)
def _assert_contains(self, response, text, status_code, msg_prefix, html):
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if (
hasattr(response, "render")
and callable(response.render)
and not response.is_rendered
):
response.render()
if msg_prefix:
msg_prefix += ": "
self.assertEqual(
response.status_code,
status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code),
)
if response.streaming:
content = b"".join(response.streaming_content)
else:
content = response.content
if not isinstance(text, bytes) or html:
text = str(text)
content = content.decode(response.charset)
text_repr = "'%s'" % text
else:
text_repr = repr(text)
if html:
content = assert_and_parse_html(
self, content, None, "Response's content is not valid HTML:"
)
text = assert_and_parse_html(
self, text, None, "Second argument is not valid HTML:"
)
real_count = content.count(text)
return (text_repr, real_count, msg_prefix)
def assertContains(
self, response, text, count=None, status_code=200, msg_prefix="", html=False
):
"""
Assert that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html
)
if count is not None:
self.assertEqual(
real_count,
count,
msg_prefix
+ "Found %d instances of %s in response (expected %d)"
% (real_count, text_repr, count),
)
else:
self.assertTrue(
real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr
)
def assertNotContains(
self, response, text, status_code=200, msg_prefix="", html=False
):
"""
Assert that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
``text`` doesn't occur in the content of the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html
)
self.assertEqual(
real_count, 0, msg_prefix + "Response should not contain %s" % text_repr
)
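    # Sketch: count and html allow asserting on occurrence counts and on
    # parsed markup rather than raw bytes ("/page/" is hypothetical):
    #
    #     response = self.client.get("/page/")
    #     self.assertContains(response, "<p>hello</p>", count=1, html=True)
    #     self.assertNotContains(response, "error")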
def _check_test_client_response(self, response, attribute, method_name):
"""
Raise a ValueError if the given response doesn't have the required
attribute.
"""
if not hasattr(response, attribute):
raise ValueError(
f"{method_name}() is only usable on responses fetched using "
"the Django test Client."
)
def _assert_form_error(self, form, field, errors, msg_prefix, form_repr):
if not form.is_bound:
self.fail(
f"{msg_prefix}The {form_repr} is not bound, it will never have any "
f"errors."
)
if field is not None and field not in form.fields:
self.fail(
f"{msg_prefix}The {form_repr} does not contain the field {field!r}."
)
if field is None:
field_errors = form.non_field_errors()
failure_message = f"The non-field errors of {form_repr} don't match."
else:
field_errors = form.errors.get(field, [])
failure_message = (
f"The errors of field {field!r} on {form_repr} don't match."
)
self.assertEqual(field_errors, errors, msg_prefix + failure_message)
def assertFormError(self, form, field, errors, msg_prefix=""):
"""
Assert that a field named "field" on the given form object has specific
errors.
        errors can be either a single error message or a list of error
        messages. Using errors=[] tests that the field has no errors.
You can pass field=None to check the form's non-field errors.
"""
if msg_prefix:
msg_prefix += ": "
errors = to_list(errors)
self._assert_form_error(form, field, errors, msg_prefix, f"form {form!r}")
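    # Sketch (ContactForm is a hypothetical form with an "email" field; the
    # message shown is EmailField's standard error):
    #
    #     form = ContactForm(data={"email": "not-an-email"})
    #     self.assertFormError(form, "email", "Enter a valid email address.")
    #     self.assertFormError(form, None, [])  # no non-field errors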
# RemovedInDjango51Warning.
def assertFormsetError(self, *args, **kw):
warnings.warn(
"assertFormsetError() is deprecated in favor of assertFormSetError().",
category=RemovedInDjango51Warning,
stacklevel=2,
)
return self.assertFormSetError(*args, **kw)
def assertFormSetError(self, formset, form_index, field, errors, msg_prefix=""):
"""
Similar to assertFormError() but for formsets.
Use form_index=None to check the formset's non-form errors (in that
case, you must also use field=None).
Otherwise use an integer to check the formset's n-th form for errors.
Other parameters are the same as assertFormError().
"""
if form_index is None and field is not None:
raise ValueError("You must use field=None with form_index=None.")
if msg_prefix:
msg_prefix += ": "
errors = to_list(errors)
if not formset.is_bound:
self.fail(
f"{msg_prefix}The formset {formset!r} is not bound, it will never have "
f"any errors."
)
if form_index is not None and form_index >= formset.total_form_count():
form_count = formset.total_form_count()
form_or_forms = "forms" if form_count > 1 else "form"
self.fail(
f"{msg_prefix}The formset {formset!r} only has {form_count} "
f"{form_or_forms}."
)
if form_index is not None:
form_repr = f"form {form_index} of formset {formset!r}"
self._assert_form_error(
formset.forms[form_index], field, errors, msg_prefix, form_repr
)
else:
failure_message = f"The non-form errors of formset {formset!r} don't match."
self.assertEqual(
formset.non_form_errors(), errors, msg_prefix + failure_message
)
def _get_template_used(self, response, template_name, msg_prefix, method_name):
if response is None and template_name is None:
raise TypeError("response and/or template_name argument must be provided")
if msg_prefix:
msg_prefix += ": "
if template_name is not None and response is not None:
self._check_test_client_response(response, "templates", method_name)
if not hasattr(response, "templates") or (response is None and template_name):
if response:
template_name = response
response = None
# use this template with context manager
return template_name, None, msg_prefix
template_names = [t.name for t in response.templates if t.name is not None]
return None, template_names, msg_prefix
def _assert_template_used(self, template_name, template_names, msg_prefix, count):
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.assertTrue(
template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s"
% (template_name, ", ".join(template_names)),
)
if count is not None:
self.assertEqual(
template_names.count(template_name),
count,
msg_prefix + "Template '%s' was expected to be rendered %d "
"time(s) but was actually rendered %d time(s)."
% (template_name, count, template_names.count(template_name)),
)
def assertTemplateUsed(
self, response=None, template_name=None, msg_prefix="", count=None
):
"""
Assert that the template with the provided name was used in rendering
the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._get_template_used(
response,
template_name,
msg_prefix,
"assertTemplateUsed",
)
if context_mgr_template:
# Use assertTemplateUsed as context manager.
return _AssertTemplateUsedContext(
self, context_mgr_template, msg_prefix, count
)
self._assert_template_used(template_name, template_names, msg_prefix, count)
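    # Sketch of both calling styles (template names are hypothetical;
    # assumes ``from django.template.loader import render_to_string``):
    #
    #     response = self.client.get("/home/")
    #     self.assertTemplateUsed(response, "base.html")
    #
    #     with self.assertTemplateUsed("index.html"):
    #         render_to_string("index.html")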
def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=""):
"""
Assert that the template with the provided name was NOT used in
rendering the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._get_template_used(
response,
template_name,
msg_prefix,
"assertTemplateNotUsed",
)
if context_mgr_template:
# Use assertTemplateNotUsed as context manager.
return _AssertTemplateNotUsedContext(self, context_mgr_template, msg_prefix)
self.assertFalse(
template_name in template_names,
msg_prefix
+ "Template '%s' was used unexpectedly in rendering the response"
% template_name,
)
@contextmanager
def _assert_raises_or_warns_cm(
self, func, cm_attr, expected_exception, expected_message
):
with func(expected_exception) as cm:
yield cm
self.assertIn(expected_message, str(getattr(cm, cm_attr)))
def _assertFooMessage(
self, func, cm_attr, expected_exception, expected_message, *args, **kwargs
):
callable_obj = None
if args:
callable_obj, *args = args
cm = self._assert_raises_or_warns_cm(
func, cm_attr, expected_exception, expected_message
)
# Assertion used in context manager fashion.
if callable_obj is None:
return cm
# Assertion was passed a callable.
with cm:
callable_obj(*args, **kwargs)
def assertRaisesMessage(
self, expected_exception, expected_message, *args, **kwargs
):
"""
Assert that expected_message is found in the message of a raised
exception.
Args:
expected_exception: Exception class expected to be raised.
expected_message: expected error message string value.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
"""
return self._assertFooMessage(
self.assertRaises,
"exception",
expected_exception,
expected_message,
*args,
**kwargs,
)
def assertWarnsMessage(self, expected_warning, expected_message, *args, **kwargs):
"""
Same as assertRaisesMessage but for assertWarns() instead of
assertRaises().
"""
return self._assertFooMessage(
self.assertWarns,
"warning",
expected_warning,
expected_message,
*args,
**kwargs,
)
def assertFieldOutput(
self,
fieldclass,
valid,
invalid,
field_args=None,
field_kwargs=None,
empty_value="",
):
"""
Assert that a form field behaves correctly with various inputs.
Args:
fieldclass: the class of the field to be tested.
valid: a dictionary mapping valid inputs to their expected
cleaned values.
invalid: a dictionary mapping invalid inputs to one or more
raised error messages.
field_args: the args passed to instantiate the field
field_kwargs: the kwargs passed to instantiate the field
empty_value: the expected clean output for inputs in empty_values
"""
if field_args is None:
field_args = []
if field_kwargs is None:
field_kwargs = {}
required = fieldclass(*field_args, **field_kwargs)
optional = fieldclass(*field_args, **{**field_kwargs, "required": False})
# test valid inputs
for input, output in valid.items():
self.assertEqual(required.clean(input), output)
self.assertEqual(optional.clean(input), output)
# test invalid inputs
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
required.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
with self.assertRaises(ValidationError) as context_manager:
optional.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
# test required inputs
error_required = [required.error_messages["required"]]
for e in required.empty_values:
with self.assertRaises(ValidationError) as context_manager:
required.clean(e)
self.assertEqual(context_manager.exception.messages, error_required)
self.assertEqual(optional.clean(e), empty_value)
# test that max_length and min_length are always accepted
if issubclass(fieldclass, CharField):
field_kwargs.update({"min_length": 2, "max_length": 20})
self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass)
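    # Sketch (assumes ``from django.forms import EmailField``; the invalid
    # message is EmailField's standard one):
    #
    #     self.assertFieldOutput(
    #         EmailField,
    #         valid={"a@a.com": "a@a.com"},
    #         invalid={"aaa": ["Enter a valid email address."]},
    #     )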
def assertHTMLEqual(self, html1, html2, msg=None):
"""
Assert that two HTML snippets are semantically the same.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The arguments must be valid HTML.
"""
dom1 = assert_and_parse_html(
self, html1, msg, "First argument is not valid HTML:"
)
dom2 = assert_and_parse_html(
self, html2, msg, "Second argument is not valid HTML:"
)
if dom1 != dom2:
standardMsg = "%s != %s" % (safe_repr(dom1, True), safe_repr(dom2, True))
diff = "\n" + "\n".join(
difflib.ndiff(
str(dom1).splitlines(),
str(dom2).splitlines(),
)
)
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertHTMLNotEqual(self, html1, html2, msg=None):
"""Assert that two HTML snippets are not semantically equivalent."""
dom1 = assert_and_parse_html(
self, html1, msg, "First argument is not valid HTML:"
)
dom2 = assert_and_parse_html(
self, html2, msg, "Second argument is not valid HTML:"
)
if dom1 == dom2:
standardMsg = "%s == %s" % (safe_repr(dom1, True), safe_repr(dom2, True))
self.fail(self._formatMessage(msg, standardMsg))
def assertInHTML(self, needle, haystack, count=None, msg_prefix=""):
needle = assert_and_parse_html(
self, needle, None, "First argument is not valid HTML:"
)
haystack = assert_and_parse_html(
self, haystack, None, "Second argument is not valid HTML:"
)
real_count = haystack.count(needle)
if count is not None:
self.assertEqual(
real_count,
count,
msg_prefix
+ "Found %d instances of '%s' in response (expected %d)"
% (real_count, needle, count),
)
else:
self.assertTrue(
real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle
)
def assertJSONEqual(self, raw, expected_data, msg=None):
"""
Assert that the JSON fragments raw and expected_data are equal.
        Usual JSON non-significant whitespace rules apply as the heavy
        lifting is delegated to the json library.
"""
try:
data = json.loads(raw)
except json.JSONDecodeError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, str):
try:
expected_data = json.loads(expected_data)
except ValueError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertEqual(data, expected_data, msg=msg)
def assertJSONNotEqual(self, raw, expected_data, msg=None):
"""
Assert that the JSON fragments raw and expected_data are not equal.
        Usual JSON non-significant whitespace rules apply as the heavy
        lifting is delegated to the json library.
"""
try:
data = json.loads(raw)
except json.JSONDecodeError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, str):
try:
expected_data = json.loads(expected_data)
except json.JSONDecodeError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertNotEqual(data, expected_data, msg=msg)
def assertXMLEqual(self, xml1, xml2, msg=None):
"""
Assert that two XML snippets are semantically the same.
Whitespace in most cases is ignored and attribute ordering is not
significant. The arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = "First or second argument is not valid XML\n%s" % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if not result:
standardMsg = "%s != %s" % (
safe_repr(xml1, True),
safe_repr(xml2, True),
)
diff = "\n" + "\n".join(
difflib.ndiff(xml1.splitlines(), xml2.splitlines())
)
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertXMLNotEqual(self, xml1, xml2, msg=None):
"""
Assert that two XML snippets are not semantically equivalent.
Whitespace in most cases is ignored and attribute ordering is not
significant. The arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = "First or second argument is not valid XML\n%s" % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if result:
standardMsg = "%s == %s" % (
safe_repr(xml1, True),
safe_repr(xml2, True),
)
self.fail(self._formatMessage(msg, standardMsg))
class TransactionTestCase(SimpleTestCase):
# Subclasses can ask for resetting of auto increment sequence before each
# test case
reset_sequences = False
# Subclasses can enable only a subset of apps for faster tests
available_apps = None
# Subclasses can define fixtures which will be automatically installed.
fixtures = None
databases = {DEFAULT_DB_ALIAS}
_disallowed_database_msg = (
"Database %(operation)s to %(alias)r are not allowed in this test. "
"Add %(alias)r to %(test)s.databases to ensure proper test isolation "
"and silence this failure."
)
# If transactions aren't available, Django will serialize the database
# contents into a fixture during setup and flush and reload them
# during teardown (as flush does not restore data from migrations).
# This can be slow; this flag allows enabling on a per-case basis.
serialized_rollback = False
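    # Sketch of a subclass wiring up these hooks (the fixture name is
    # hypothetical):
    #
    #     class PollTests(TransactionTestCase):
    #         fixtures = ["polls.json"]
    #         reset_sequences = True
    #         serialized_rollback = True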
def _pre_setup(self):
"""
Perform pre-test setup:
* If the class has an 'available_apps' attribute, restrict the app
registry to these applications, then fire the post_migrate signal --
it must run with the correct set of applications for the test case.
* If the class has a 'fixtures' attribute, install those fixtures.
"""
super()._pre_setup()
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
setting_changed.send(
sender=settings._wrapped.__class__,
setting="INSTALLED_APPS",
value=self.available_apps,
enter=True,
)
for db_name in self._databases_names(include_mirrors=False):
emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
try:
self._fixture_setup()
except Exception:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(
sender=settings._wrapped.__class__,
setting="INSTALLED_APPS",
value=settings.INSTALLED_APPS,
enter=False,
)
raise
# Clear the queries_log so that it's less likely to overflow (a single
# test probably won't execute 9K queries). If queries_log overflows,
# then assertNumQueries() doesn't work.
for db_name in self._databases_names(include_mirrors=False):
connections[db_name].queries_log.clear()
@classmethod
def _databases_names(cls, include_mirrors=True):
# Only consider allowed database aliases, including mirrors or not.
return [
alias
for alias in connections
if alias in cls.databases
and (
include_mirrors
or not connections[alias].settings_dict["TEST"]["MIRROR"]
)
]
def _reset_sequences(self, db_name):
conn = connections[db_name]
if conn.features.supports_sequence_reset:
sql_list = conn.ops.sequence_reset_by_name_sql(
no_style(), conn.introspection.sequence_list()
)
if sql_list:
with transaction.atomic(using=db_name):
with conn.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def _fixture_setup(self):
for db_name in self._databases_names(include_mirrors=False):
# Reset sequences
if self.reset_sequences:
self._reset_sequences(db_name)
# Provide replica initial data from migrated apps, if needed.
if self.serialized_rollback and hasattr(
connections[db_name], "_test_serialized_contents"
):
if self.available_apps is not None:
apps.unset_available_apps()
connections[db_name].creation.deserialize_db_from_string(
connections[db_name]._test_serialized_contents
)
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
if self.fixtures:
# This slightly awkward call syntax is needed because *args and
# **kwargs are used together.
call_command(
"loaddata", *self.fixtures, **{"verbosity": 0, "database": db_name}
)
def _should_reload_connections(self):
return True
def _post_teardown(self):
"""
Perform post-test things:
* Flush the contents of the database to leave a clean slate. If the
class has an 'available_apps' attribute, don't fire post_migrate.
* Force-close the connection so the next test gets a clean cursor.
"""
try:
self._fixture_teardown()
super()._post_teardown()
if self._should_reload_connections():
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does a rollback, the effect
# of these statements is lost, which can affect the operation of
# tests (e.g., losing a timezone setting causing objects to be
# created with the wrong time). To make sure this doesn't
# happen, get a clean connection at the start of every test.
for conn in connections.all(initialized_only=True):
conn.close()
finally:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(
sender=settings._wrapped.__class__,
setting="INSTALLED_APPS",
value=settings.INSTALLED_APPS,
enter=False,
)
def _fixture_teardown(self):
# Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
# when flushing only a subset of the apps
for db_name in self._databases_names(include_mirrors=False):
# Flush the database
inhibit_post_migrate = (
self.available_apps is not None
or ( # Inhibit the post_migrate signal when using serialized
# rollback to avoid trying to recreate the serialized data.
self.serialized_rollback
and hasattr(connections[db_name], "_test_serialized_contents")
)
)
call_command(
"flush",
verbosity=0,
interactive=False,
database=db_name,
reset_sequences=False,
allow_cascade=self.available_apps is not None,
inhibit_post_migrate=inhibit_post_migrate,
)
# RemovedInDjango51Warning.
def assertQuerysetEqual(self, *args, **kw):
warnings.warn(
"assertQuerysetEqual() is deprecated in favor of assertQuerySetEqual().",
category=RemovedInDjango51Warning,
stacklevel=2,
)
return self.assertQuerySetEqual(*args, **kw)
def assertQuerySetEqual(self, qs, values, transform=None, ordered=True, msg=None):
values = list(values)
items = qs
if transform is not None:
items = map(transform, items)
if not ordered:
return self.assertDictEqual(Counter(items), Counter(values), msg=msg)
# For example, qs.iterator() could be passed as qs, but it does not
# have an 'ordered' attribute.
if len(values) > 1 and hasattr(qs, "ordered") and not qs.ordered:
raise ValueError(
"Trying to compare non-ordered queryset against more than one "
"ordered value."
)
return self.assertEqual(list(items), values, msg=msg)
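# Usage sketch (illustrative; "Author" is a hypothetical model):
#
#     self.assertQuerySetEqual(
#         Author.objects.order_by("name"),
#         ["Alice", "Bob"],
#         transform=lambda a: a.name,
#     )
#
# With ordered=False the comparison is Counter-based, so duplicates still
# matter but ordering does not.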
def assertNumQueries(self, num, func=None, *args, using=DEFAULT_DB_ALIAS, **kwargs):
conn = connections[using]
context = _AssertNumQueriesContext(self, num, conn)
if func is None:
return context
with context:
func(*args, **kwargs)
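# Usage sketch (illustrative; "Author" is a hypothetical model). Works as a
# context manager or by passing a callable:
#
#     with self.assertNumQueries(2):
#         Author.objects.create(name="a")
#         Author.objects.create(name="b")
#
#     self.assertNumQueries(1, Author.objects.count)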
def connections_support_transactions(aliases=None):
"""
Return whether or not all (or specified) connections support
transactions.
"""
conns = (
connections.all()
if aliases is None
else (connections[alias] for alias in aliases)
)
return all(conn.features.supports_transactions for conn in conns)
class TestData:
"""
Descriptor to provide TestCase instance isolation for attributes assigned
during the setUpTestData() phase.
Allow safe alteration of objects assigned in setUpTestData() by test
methods by exposing deep copies instead of the original objects.
Objects are deep copied using a memo kept on the test case instance in
order to maintain their original relationships.
"""
memo_attr = "_testdata_memo"
def __init__(self, name, data):
self.name = name
self.data = data
def get_memo(self, testcase):
try:
memo = getattr(testcase, self.memo_attr)
except AttributeError:
memo = {}
setattr(testcase, self.memo_attr, memo)
return memo
def __get__(self, instance, owner):
if instance is None:
return self.data
memo = self.get_memo(instance)
data = deepcopy(self.data, memo)
setattr(instance, self.name, data)
return data
def __repr__(self):
return "<TestData: name=%r, data=%r>" % (self.name, self.data)
class TestCase(TransactionTestCase):
"""
Similar to TransactionTestCase, but use `transaction.atomic()` to achieve
test isolation.
In most situations, TestCase should be preferred to TransactionTestCase as
it allows faster execution. However, there are some situations where using
TransactionTestCase might be necessary (e.g. testing some transactional
behavior).
On database backends with no transaction support, TestCase behaves as
TransactionTestCase.
"""
@classmethod
def _enter_atomics(cls):
"""Open atomic blocks for multiple databases."""
atomics = {}
for db_name in cls._databases_names():
atomic = transaction.atomic(using=db_name)
atomic._from_testcase = True
atomic.__enter__()
atomics[db_name] = atomic
return atomics
@classmethod
def _rollback_atomics(cls, atomics):
"""Rollback atomic blocks opened by the previous method."""
for db_name in reversed(cls._databases_names()):
transaction.set_rollback(True, using=db_name)
atomics[db_name].__exit__(None, None, None)
@classmethod
def _databases_support_transactions(cls):
return connections_support_transactions(cls.databases)
@classmethod
def setUpClass(cls):
super().setUpClass()
if not cls._databases_support_transactions():
return
cls.cls_atomics = cls._enter_atomics()
if cls.fixtures:
for db_name in cls._databases_names(include_mirrors=False):
try:
call_command(
"loaddata",
*cls.fixtures,
**{"verbosity": 0, "database": db_name},
)
except Exception:
cls._rollback_atomics(cls.cls_atomics)
raise
pre_attrs = cls.__dict__.copy()
try:
cls.setUpTestData()
except Exception:
cls._rollback_atomics(cls.cls_atomics)
raise
for name, value in cls.__dict__.items():
if value is not pre_attrs.get(name):
setattr(cls, name, TestData(name, value))
@classmethod
def tearDownClass(cls):
if cls._databases_support_transactions():
cls._rollback_atomics(cls.cls_atomics)
for conn in connections.all(initialized_only=True):
conn.close()
super().tearDownClass()
@classmethod
def setUpTestData(cls):
"""Load initial data for the TestCase."""
pass
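# Note: setUpTestData() runs once per class inside the class-wide atomic
# blocks opened in setUpClass(); on backends without transaction support it
# is re-run before each test (see _fixture_setup() below). Attributes it
# assigns are exposed through the TestData descriptor above, so tests see
# isolated deep copies.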
def _should_reload_connections(self):
if self._databases_support_transactions():
return False
return super()._should_reload_connections()
def _fixture_setup(self):
if not self._databases_support_transactions():
# If the backend does not support transactions, we should reload
# class data before each test
self.setUpTestData()
return super()._fixture_setup()
if self.reset_sequences:
raise TypeError("reset_sequences cannot be used on TestCase instances")
self.atomics = self._enter_atomics()
def _fixture_teardown(self):
if not self._databases_support_transactions():
return super()._fixture_teardown()
try:
for db_name in reversed(self._databases_names()):
if self._should_check_constraints(connections[db_name]):
connections[db_name].check_constraints()
finally:
self._rollback_atomics(self.atomics)
def _should_check_constraints(self, connection):
return (
connection.features.can_defer_constraint_checks
and not connection.needs_rollback
and connection.is_usable()
)
@classmethod
@contextmanager
def captureOnCommitCallbacks(cls, *, using=DEFAULT_DB_ALIAS, execute=False):
"""Context manager to capture transaction.on_commit() callbacks."""
callbacks = []
start_count = len(connections[using].run_on_commit)
try:
yield callbacks
finally:
while True:
callback_count = len(connections[using].run_on_commit)
for _, callback, robust in connections[using].run_on_commit[
start_count:
]:
callbacks.append(callback)
if execute:
if robust:
try:
callback()
except Exception as e:
logger.error(
f"Error calling {callback.__qualname__} in "
f"on_commit() (%s).",
e,
exc_info=True,
)
else:
callback()
if callback_count == len(connections[using].run_on_commit):
break
start_count = callback_count
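# Usage sketch (illustrative; "/contact/" is a hypothetical URL):
#
#     with self.captureOnCommitCallbacks(execute=True) as callbacks:
#         self.client.post("/contact/", {"message": "hi"})
#     self.assertEqual(len(callbacks), 1)
#
# With execute=True, captured transaction.on_commit() callbacks run when
# the context exits; exceptions from robust callbacks are logged rather
# than raised.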
class CheckCondition:
"""Descriptor class for deferred condition checking."""
def __init__(self, *conditions):
self.conditions = conditions
def add_condition(self, condition, reason):
return self.__class__(*self.conditions, (condition, reason))
def __get__(self, instance, cls=None):
# Trigger access for all bases.
if any(getattr(base, "__unittest_skip__", False) for base in cls.__bases__):
return True
for condition, reason in self.conditions:
if condition():
# Override this descriptor's value and set the skip reason.
cls.__unittest_skip__ = True
cls.__unittest_skip_why__ = reason
return True
return False
def _deferredSkip(condition, reason, name):
def decorator(test_func):
nonlocal condition
if not (
isinstance(test_func, type) and issubclass(test_func, unittest.TestCase)
):
@wraps(test_func)
def skip_wrapper(*args, **kwargs):
if (
args
and isinstance(args[0], unittest.TestCase)
and connection.alias not in getattr(args[0], "databases", {})
):
raise ValueError(
"%s cannot be used on %s as %s doesn't allow queries "
"against the %r database."
% (
name,
args[0],
args[0].__class__.__qualname__,
connection.alias,
)
)
if condition():
raise unittest.SkipTest(reason)
return test_func(*args, **kwargs)
test_item = skip_wrapper
else:
# Assume a class is decorated
test_item = test_func
databases = getattr(test_item, "databases", None)
if not databases or connection.alias not in databases:
# Defer raising to allow importing test class's module.
def condition():
raise ValueError(
"%s cannot be used on %s as it doesn't allow queries "
"against the '%s' database."
% (
name,
test_item,
connection.alias,
)
)
# Retrieve the possibly existing value from the class's dict to
# avoid triggering the descriptor.
skip = test_func.__dict__.get("__unittest_skip__")
if isinstance(skip, CheckCondition):
test_item.__unittest_skip__ = skip.add_condition(condition, reason)
elif skip is not True:
test_item.__unittest_skip__ = CheckCondition((condition, reason))
return test_item
return decorator
def skipIfDBFeature(*features):
"""Skip a test if a database has at least one of the named features."""
return _deferredSkip(
lambda: any(
getattr(connection.features, feature, False) for feature in features
),
"Database has feature(s) %s" % ", ".join(features),
"skipIfDBFeature",
)
def skipUnlessDBFeature(*features):
"""Skip a test unless a database has all the named features."""
return _deferredSkip(
lambda: not all(
getattr(connection.features, feature, False) for feature in features
),
"Database doesn't support feature(s): %s" % ", ".join(features),
"skipUnlessDBFeature",
)
def skipUnlessAnyDBFeature(*features):
"""Skip a test unless a database has any of the named features."""
return _deferredSkip(
lambda: not any(
getattr(connection.features, feature, False) for feature in features
),
"Database doesn't support any of the feature(s): %s" % ", ".join(features),
"skipUnlessAnyDBFeature",
)
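# Usage sketch (illustrative):
#
#     @skipUnlessDBFeature("supports_transactions")
#     def test_rollback_behavior(self):
#         ...
#
# The condition is wrapped in CheckCondition, so the database feature is
# only inspected when the test is about to run, not at import time.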
class QuietWSGIRequestHandler(WSGIRequestHandler):
"""
A WSGIRequestHandler that doesn't log to standard output any of the
requests received, so as to not clutter the test result output.
"""
def log_message(*args):
pass
class FSFilesHandler(WSGIHandler):
"""
WSGI middleware that intercepts calls to a directory, as defined by one of
the *_ROOT settings, and serves those files, publishing them under *_URL.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super().__init__()
def _should_handle(self, path):
"""
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""Return the relative path to the file on disk for the given URL."""
relative_url = url.removeprefix(self.base_url[2])
return url2pathname(relative_url)
def get_response(self, request):
from django.http import Http404
if self._should_handle(request.path):
try:
return self.serve(request)
except Http404:
pass
return super().get_response(request)
def serve(self, request):
os_rel_path = self.file_path(request.path)
os_rel_path = posixpath.normpath(unquote(os_rel_path))
# Emulate behavior of django.contrib.staticfiles.views.serve() when it
# invokes staticfiles' finders functionality.
# TODO: Modify if/when that internal API is refactored
final_rel_path = os_rel_path.replace("\\", "/").lstrip("/")
return serve(request, final_rel_path, document_root=self.get_base_dir())
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super().__call__(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
"""
Handler for serving static files. A private class that is meant to be used
solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.STATIC_ROOT
def get_base_url(self):
return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
"""
Handler for serving the media files. A private class that is meant to be
used solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.MEDIA_ROOT
def get_base_url(self):
return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
"""Thread for running a live HTTP server while the tests are running."""
server_class = ThreadedWSGIServer
def __init__(self, host, static_handler, connections_override=None, port=0):
self.host = host
self.port = port
self.is_ready = threading.Event()
self.error = None
self.static_handler = static_handler
self.connections_override = connections_override
super().__init__()
def run(self):
"""
Set up the live server and databases, and then loop over handling
HTTP requests.
"""
if self.connections_override:
# Override this thread's database connections with the ones
# provided by the main thread.
for alias, conn in self.connections_override.items():
connections[alias] = conn
try:
# Create the handler for serving static and media files
handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
self.httpd = self._create_server(
connections_override=self.connections_override,
)
# If binding to port zero, assign the port allocated by the OS.
if self.port == 0:
self.port = self.httpd.server_address[1]
self.httpd.set_app(handler)
self.is_ready.set()
self.httpd.serve_forever()
except Exception as e:
self.error = e
self.is_ready.set()
finally:
connections.close_all()
def _create_server(self, connections_override=None):
return self.server_class(
(self.host, self.port),
QuietWSGIRequestHandler,
allow_reuse_address=False,
connections_override=connections_override,
)
def terminate(self):
if hasattr(self, "httpd"):
# Stop the WSGI server
self.httpd.shutdown()
self.httpd.server_close()
self.join()
class LiveServerTestCase(TransactionTestCase):
"""
Do basically the same as TransactionTestCase but also launch a live HTTP
server in a separate thread so that the tests may use another testing
framework, such as Selenium, instead of the built-in dummy
client.
It inherits from TransactionTestCase instead of TestCase because the
threads don't share the same transactions (unless using in-memory SQLite)
and each thread needs to commit all their transactions so that the other
thread can see the changes.
"""
host = "localhost"
port = 0
server_thread_class = LiveServerThread
static_handler = _StaticFilesHandler
@classproperty
def live_server_url(cls):
return "http://%s:%s" % (cls.host, cls.server_thread.port)
@classproperty
def allowed_host(cls):
return cls.host
@classmethod
def _make_connections_override(cls):
connections_override = {}
for conn in connections.all():
# If using in-memory sqlite databases, pass the connections to
# the server thread.
if conn.vendor == "sqlite" and conn.is_in_memory_db():
connections_override[conn.alias] = conn
return connections_override
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._live_server_modified_settings = modify_settings(
ALLOWED_HOSTS={"append": cls.allowed_host},
)
cls._live_server_modified_settings.enable()
cls.addClassCleanup(cls._live_server_modified_settings.disable)
cls._start_server_thread()
@classmethod
def _start_server_thread(cls):
connections_override = cls._make_connections_override()
for conn in connections_override.values():
# Explicitly enable thread-shareability for this connection.
conn.inc_thread_sharing()
cls.server_thread = cls._create_server_thread(connections_override)
cls.server_thread.daemon = True
cls.server_thread.start()
cls.addClassCleanup(cls._terminate_thread)
# Wait for the live server to be ready
cls.server_thread.is_ready.wait()
if cls.server_thread.error:
raise cls.server_thread.error
@classmethod
def _create_server_thread(cls, connections_override):
return cls.server_thread_class(
cls.host,
cls.static_handler,
connections_override=connections_override,
port=cls.port,
)
@classmethod
def _terminate_thread(cls):
# Terminate the live server's thread.
cls.server_thread.terminate()
# Restore shared connections' non-shareability.
for conn in cls.server_thread.connections_override.values():
conn.dec_thread_sharing()
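# Usage sketch (illustrative; uses only the standard library):
#
#     from urllib.request import urlopen
#
#     class HomePageLiveTests(LiveServerTestCase):
#         def test_home(self):
#             with urlopen(self.live_server_url + "/") as response:
#                 self.assertEqual(response.status, 200)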
class SerializeMixin:
"""
Enforce serialization of TestCases that share a common resource.
Define a common 'lockfile' for each set of TestCases to serialize. This
file must exist on the filesystem.
Place it early in the MRO in order to isolate setUpClass()/tearDownClass().
"""
lockfile = None
def __init_subclass__(cls, /, **kwargs):
super().__init_subclass__(**kwargs)
if cls.lockfile is None:
raise ValueError(
"{}.lockfile isn't set. Set it to a unique value "
"in the base class.".format(cls.__name__)
)
@classmethod
def setUpClass(cls):
cls._lockfile = open(cls.lockfile)
cls.addClassCleanup(cls._lockfile.close)
locks.lock(cls._lockfile, locks.LOCK_EX)
super().setUpClass()
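# Usage sketch (illustrative): subclasses that share a resource point
# lockfile at an existing file, commonly the module itself:
#
#     class BaseFileTests(SerializeMixin, SimpleTestCase):
#         lockfile = __file__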
import collections
import logging
import os
import re
import sys
import time
import warnings
from contextlib import contextmanager
from functools import wraps
from io import StringIO
from itertools import chain
from types import SimpleNamespace
from unittest import TestCase, skipIf, skipUnless
from xml.dom.minidom import Node, parseString
from asgiref.sync import iscoroutinefunction
from django.apps import apps
from django.apps.registry import Apps
from django.conf import UserSettingsHolder, settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import request_started, setting_changed
from django.db import DEFAULT_DB_ALIAS, connections, reset_queries
from django.db.models.options import Options
from django.template import Template
from django.test.signals import template_rendered
from django.urls import get_script_prefix, set_script_prefix
from django.utils.translation import deactivate
try:
import jinja2
except ImportError:
jinja2 = None
__all__ = (
"Approximate",
"ContextList",
"isolate_lru_cache",
"get_runner",
"CaptureQueriesContext",
"ignore_warnings",
"isolate_apps",
"modify_settings",
"override_settings",
"override_system_checks",
"tag",
"requires_tz_support",
"setup_databases",
"setup_test_environment",
"teardown_test_environment",
)
TZ_SUPPORT = hasattr(time, "tzset")
class Approximate:
def __init__(self, val, places=7):
self.val = val
self.places = places
def __repr__(self):
return repr(self.val)
def __eq__(self, other):
return self.val == other or round(abs(self.val - other), self.places) == 0
class ContextList(list):
"""
A wrapper that provides direct key access to context items contained
in a list of context objects.
"""
def __getitem__(self, key):
if isinstance(key, str):
for subcontext in self:
if key in subcontext:
return subcontext[key]
raise KeyError(key)
else:
return super().__getitem__(key)
def get(self, key, default=None):
try:
return self.__getitem__(key)
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
return True
def keys(self):
"""
Flattened keys of subcontexts.
"""
return set(chain.from_iterable(d for subcontext in self for d in subcontext))
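# Usage sketch (illustrative): the test client exposes response.context as
# a ContextList, so a key can be read across the stacked contexts directly:
#
#     response = self.client.get("/form/")  # Hypothetical URL.
#     form = response.context["form"]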
def instrumented_test_render(self, context):
"""
An instrumented Template render method, providing a signal that can be
intercepted by the test Client.
"""
template_rendered.send(sender=self, template=self, context=context)
return self.nodelist.render(context)
class _TestState:
pass
def setup_test_environment(debug=None):
"""
Perform global pre-test setup, such as installing the instrumented template
renderer and setting the email backend to the locmem email backend.
"""
if hasattr(_TestState, "saved_data"):
# Executing this function twice would overwrite the saved values.
raise RuntimeError(
"setup_test_environment() was already called and can't be called "
"again without first calling teardown_test_environment()."
)
if debug is None:
debug = settings.DEBUG
saved_data = SimpleNamespace()
_TestState.saved_data = saved_data
saved_data.allowed_hosts = settings.ALLOWED_HOSTS
# Add the default host of the test client.
settings.ALLOWED_HOSTS = [*settings.ALLOWED_HOSTS, "testserver"]
saved_data.debug = settings.DEBUG
settings.DEBUG = debug
saved_data.email_backend = settings.EMAIL_BACKEND
settings.EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
saved_data.template_render = Template._render
Template._render = instrumented_test_render
mail.outbox = []
deactivate()
def teardown_test_environment():
"""
Perform any global post-test teardown, such as restoring the original
template renderer and restoring the email sending functions.
"""
saved_data = _TestState.saved_data
settings.ALLOWED_HOSTS = saved_data.allowed_hosts
settings.DEBUG = saved_data.debug
settings.EMAIL_BACKEND = saved_data.email_backend
Template._render = saved_data.template_render
del _TestState.saved_data
del mail.outbox
def setup_databases(
verbosity,
interactive,
*,
time_keeper=None,
keepdb=False,
debug_sql=False,
parallel=0,
aliases=None,
serialized_aliases=None,
**kwargs,
):
"""Create the test databases."""
if time_keeper is None:
time_keeper = NullTimeKeeper()
test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
old_names = []
for db_name, aliases in test_databases.values():
first_alias = None
for alias in aliases:
connection = connections[alias]
old_names.append((connection, db_name, first_alias is None))
# Actually create the database for the first connection
if first_alias is None:
first_alias = alias
with time_keeper.timed(" Creating '%s'" % alias):
serialize_alias = (
serialized_aliases is None or alias in serialized_aliases
)
connection.creation.create_test_db(
verbosity=verbosity,
autoclobber=not interactive,
keepdb=keepdb,
serialize=serialize_alias,
)
if parallel > 1:
for index in range(parallel):
with time_keeper.timed(" Cloning '%s'" % alias):
connection.creation.clone_test_db(
suffix=str(index + 1),
verbosity=verbosity,
keepdb=keepdb,
)
# Configure all other connections as mirrors of the first one
else:
connections[alias].creation.set_as_test_mirror(
connections[first_alias].settings_dict
)
# Configure the test mirrors.
for alias, mirror_alias in mirrored_aliases.items():
connections[alias].creation.set_as_test_mirror(
connections[mirror_alias].settings_dict
)
if debug_sql:
for alias in connections:
connections[alias].force_debug_cursor = True
return old_names
def iter_test_cases(tests):
"""
Return an iterator over a test suite's unittest.TestCase objects.
The tests argument can also be an iterable of TestCase objects.
"""
for test in tests:
if isinstance(test, str):
# Prevent an unfriendly RecursionError that can happen with
# strings.
raise TypeError(
f"Test {test!r} must be a test case or test suite not string "
f"(was found in {tests!r})."
)
if isinstance(test, TestCase):
yield test
else:
# Otherwise, assume it is a test suite.
yield from iter_test_cases(test)
def dependency_ordered(test_databases, dependencies):
"""
Reorder test_databases into an order that honors the dependencies
described in TEST[DEPENDENCIES].
"""
ordered_test_databases = []
resolved_databases = set()
# Maps db signature to dependencies of all its aliases
dependencies_map = {}
# Check that no database depends on its own alias
for sig, (_, aliases) in test_databases:
all_deps = set()
for alias in aliases:
all_deps.update(dependencies.get(alias, []))
if not all_deps.isdisjoint(aliases):
raise ImproperlyConfigured(
"Circular dependency: databases %r depend on each other, "
"but are aliases." % aliases
)
dependencies_map[sig] = all_deps
while test_databases:
changed = False
deferred = []
# Try to find a DB that has all its dependencies met
for signature, (db_name, aliases) in test_databases:
if dependencies_map[signature].issubset(resolved_databases):
resolved_databases.update(aliases)
ordered_test_databases.append((signature, (db_name, aliases)))
changed = True
else:
deferred.append((signature, (db_name, aliases)))
if not changed:
raise ImproperlyConfigured("Circular dependency in TEST[DEPENDENCIES]")
test_databases = deferred
return ordered_test_databases
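# Illustrative settings sketch: creation order is driven by
# TEST["DEPENDENCIES"], e.g. to ensure "default" is created before "other":
#
#     DATABASES = {
#         "default": {...},
#         "other": {
#             # ...
#             "TEST": {"DEPENDENCIES": ["default"]},
#         },
#     }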
def get_unique_databases_and_mirrors(aliases=None):
"""
Figure out which databases actually need to be created.
Deduplicate entries in DATABASES that correspond to the same database or are
configured as test mirrors.
Return two values:
- test_databases: ordered mapping of signatures to (name, list of aliases)
where all aliases share the same underlying database.
- mirrored_aliases: mapping of mirror aliases to original aliases.
"""
if aliases is None:
aliases = connections
mirrored_aliases = {}
test_databases = {}
dependencies = {}
default_sig = connections[DEFAULT_DB_ALIAS].creation.test_db_signature()
for alias in connections:
connection = connections[alias]
test_settings = connection.settings_dict["TEST"]
if test_settings["MIRROR"]:
# If the database is marked as a test mirror, save the alias.
mirrored_aliases[alias] = test_settings["MIRROR"]
elif alias in aliases:
# Store a tuple with DB parameters that uniquely identify it.
# If we have two aliases with the same values for that tuple,
# we only need to create the test database once.
item = test_databases.setdefault(
connection.creation.test_db_signature(),
(connection.settings_dict["NAME"], []),
)
# The default database must be the first because data migrations
# use the default alias by default.
if alias == DEFAULT_DB_ALIAS:
item[1].insert(0, alias)
else:
item[1].append(alias)
if "DEPENDENCIES" in test_settings:
dependencies[alias] = test_settings["DEPENDENCIES"]
else:
if (
alias != DEFAULT_DB_ALIAS
and connection.creation.test_db_signature() != default_sig
):
dependencies[alias] = test_settings.get(
"DEPENDENCIES", [DEFAULT_DB_ALIAS]
)
test_databases = dict(dependency_ordered(test_databases.items(), dependencies))
return test_databases, mirrored_aliases
def teardown_databases(old_config, verbosity, parallel=0, keepdb=False):
"""Destroy all the non-mirror databases."""
for connection, old_name, destroy in old_config:
if destroy:
if parallel > 1:
for index in range(parallel):
connection.creation.destroy_test_db(
suffix=str(index + 1),
verbosity=verbosity,
keepdb=keepdb,
)
connection.creation.destroy_test_db(old_name, verbosity, keepdb)
def get_runner(settings, test_runner_class=None):
test_runner_class = test_runner_class or settings.TEST_RUNNER
test_path = test_runner_class.split(".")
# Allow for relative paths
if len(test_path) > 1:
test_module_name = ".".join(test_path[:-1])
else:
test_module_name = "."
test_module = __import__(test_module_name, {}, {}, test_path[-1])
return getattr(test_module, test_path[-1])
class TestContextDecorator:
"""
A base class that can either be used as a context manager during tests
or as a test function or unittest.TestCase subclass decorator to perform
temporary alterations.
`attr_name`: attribute assigned the return value of enable() if used as
a class decorator.
`kwarg_name`: keyword argument passing the return value of enable() if
used as a function decorator.
"""
def __init__(self, attr_name=None, kwarg_name=None):
self.attr_name = attr_name
self.kwarg_name = kwarg_name
def enable(self):
raise NotImplementedError
def disable(self):
raise NotImplementedError
def __enter__(self):
return self.enable()
def __exit__(self, exc_type, exc_value, traceback):
self.disable()
def decorate_class(self, cls):
if issubclass(cls, TestCase):
decorated_setUp = cls.setUp
def setUp(inner_self):
context = self.enable()
inner_self.addCleanup(self.disable)
if self.attr_name:
setattr(inner_self, self.attr_name, context)
decorated_setUp(inner_self)
cls.setUp = setUp
return cls
raise TypeError("Can only decorate subclasses of unittest.TestCase")
def decorate_callable(self, func):
if iscoroutinefunction(func):
# If the inner function is an async function, we must execute async
# as well so that the `with` statement executes at the right time.
@wraps(func)
async def inner(*args, **kwargs):
with self as context:
if self.kwarg_name:
kwargs[self.kwarg_name] = context
return await func(*args, **kwargs)
else:
@wraps(func)
def inner(*args, **kwargs):
with self as context:
if self.kwarg_name:
kwargs[self.kwarg_name] = context
return func(*args, **kwargs)
return inner
def __call__(self, decorated):
if isinstance(decorated, type):
return self.decorate_class(decorated)
elif callable(decorated):
return self.decorate_callable(decorated)
raise TypeError("Cannot decorate object of type %s" % type(decorated))
class override_settings(TestContextDecorator):
"""
Act as either a decorator or a context manager. If it's a decorator, take a
function and return a wrapped function. If it's a context manager, use it
with the ``with`` statement. In either event, entering/exiting are called
before and after, respectively, the function/block is executed.
"""
enable_exception = None
def __init__(self, **kwargs):
self.options = kwargs
super().__init__()
def enable(self):
# Keep this code at the beginning to leave the settings unchanged
# in case it raises an exception because INSTALLED_APPS is invalid.
if "INSTALLED_APPS" in self.options:
try:
apps.set_installed_apps(self.options["INSTALLED_APPS"])
except Exception:
apps.unset_installed_apps()
raise
override = UserSettingsHolder(settings._wrapped)
for key, new_value in self.options.items():
setattr(override, key, new_value)
self.wrapped = settings._wrapped
settings._wrapped = override
for key, new_value in self.options.items():
try:
setting_changed.send(
sender=settings._wrapped.__class__,
setting=key,
value=new_value,
enter=True,
)
except Exception as exc:
self.enable_exception = exc
self.disable()
def disable(self):
if "INSTALLED_APPS" in self.options:
apps.unset_installed_apps()
settings._wrapped = self.wrapped
del self.wrapped
responses = []
for key in self.options:
new_value = getattr(settings, key, None)
responses_for_setting = setting_changed.send_robust(
sender=settings._wrapped.__class__,
setting=key,
value=new_value,
enter=False,
)
responses.extend(responses_for_setting)
if self.enable_exception is not None:
exc = self.enable_exception
self.enable_exception = None
raise exc
for _, response in responses:
if isinstance(response, Exception):
raise response
def save_options(self, test_func):
if test_func._overridden_settings is None:
test_func._overridden_settings = self.options
else:
# Duplicate dict to prevent subclasses from altering their parent.
test_func._overridden_settings = {
**test_func._overridden_settings,
**self.options,
}
def decorate_class(self, cls):
from django.test import SimpleTestCase
if not issubclass(cls, SimpleTestCase):
raise ValueError(
"Only subclasses of Django SimpleTestCase can be decorated "
"with override_settings"
)
self.save_options(cls)
return cls
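# Usage sketch (illustrative): override_settings works as a context manager
# anywhere and as a decorator on SimpleTestCase subclasses or their methods:
#
#     @override_settings(LANGUAGE_CODE="en")
#     class I18NTests(SimpleTestCase):
#         def test_tz(self):
#             with override_settings(USE_TZ=True):
#                 ...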
class modify_settings(override_settings):
"""
Like override_settings, but makes it possible to append, prepend, or remove
items instead of redefining the entire list.
"""
def __init__(self, *args, **kwargs):
if args:
# Hack used when instantiating from SimpleTestCase.setUpClass.
assert not kwargs
self.operations = args[0]
else:
assert not args
self.operations = list(kwargs.items())
super(override_settings, self).__init__()
def save_options(self, test_func):
if test_func._modified_settings is None:
test_func._modified_settings = self.operations
else:
# Duplicate list to prevent subclasses from altering their parent.
test_func._modified_settings = (
list(test_func._modified_settings) + self.operations
)
def enable(self):
self.options = {}
for name, operations in self.operations:
try:
# When called from SimpleTestCase.setUpClass, values may be
# overridden several times; accumulate changes.
value = self.options[name]
except KeyError:
value = list(getattr(settings, name, []))
for action, items in operations.items():
# items may be a single value or an iterable.
if isinstance(items, str):
items = [items]
if action == "append":
value += [item for item in items if item not in value]
elif action == "prepend":
value = [item for item in items if item not in value] + value
elif action == "remove":
value = [item for item in value if item not in items]
else:
raise ValueError("Unsupported action: %s" % action)
self.options[name] = value
super().enable()
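# Usage sketch (illustrative; the appended path is hypothetical):
#
#     @modify_settings(MIDDLEWARE={
#         "append": "myapp.middleware.ExtraMiddleware",
#         "remove": "django.middleware.csrf.CsrfViewMiddleware",
#     })
#     class MiddlewareTests(SimpleTestCase):
#         ...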
class override_system_checks(TestContextDecorator):
"""
Act as a decorator. Override list of registered system checks.
Useful when you override `INSTALLED_APPS`, e.g. if you exclude `auth` app,
you also need to exclude its system checks.
"""
def __init__(self, new_checks, deployment_checks=None):
from django.core.checks.registry import registry
self.registry = registry
self.new_checks = new_checks
self.deployment_checks = deployment_checks
super().__init__()
def enable(self):
self.old_checks = self.registry.registered_checks
self.registry.registered_checks = set()
for check in self.new_checks:
self.registry.register(check, *getattr(check, "tags", ()))
self.old_deployment_checks = self.registry.deployment_checks
if self.deployment_checks is not None:
self.registry.deployment_checks = set()
for check in self.deployment_checks:
self.registry.register(check, *getattr(check, "tags", ()), deploy=True)
def disable(self):
self.registry.registered_checks = self.old_checks
self.registry.deployment_checks = self.old_deployment_checks
def compare_xml(want, got):
"""
Try to do an 'xml-comparison' of want and got. Plain string comparison
doesn't always work because, for example, attribute ordering should not be
important. Ignore comment nodes, processing instructions, the document type
node, and leading and trailing whitespace.
Based on https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
"""
_norm_whitespace_re = re.compile(r"[ \t\n][ \t\n]+")
def norm_whitespace(v):
return _norm_whitespace_re.sub(" ", v)
def child_text(element):
return "".join(
c.data for c in element.childNodes if c.nodeType == Node.TEXT_NODE
)
def children(element):
return [c for c in element.childNodes if c.nodeType == Node.ELEMENT_NODE]
def norm_child_text(element):
return norm_whitespace(child_text(element))
def attrs_dict(element):
return dict(element.attributes.items())
def check_element(want_element, got_element):
if want_element.tagName != got_element.tagName:
return False
if norm_child_text(want_element) != norm_child_text(got_element):
return False
if attrs_dict(want_element) != attrs_dict(got_element):
return False
want_children = children(want_element)
got_children = children(got_element)
if len(want_children) != len(got_children):
return False
return all(
check_element(want, got) for want, got in zip(want_children, got_children)
)
def first_node(document):
for node in document.childNodes:
if node.nodeType not in (
Node.COMMENT_NODE,
Node.DOCUMENT_TYPE_NODE,
Node.PROCESSING_INSTRUCTION_NODE,
):
return node
want = want.strip().replace("\\n", "\n")
got = got.strip().replace("\\n", "\n")
# If the string is not a complete XML document, we may need to add a
# root element. This allows us to compare fragments, like "<foo/><bar/>"
if not want.startswith("<?xml"):
wrapper = "<root>%s</root>"
want = wrapper % want
got = wrapper % got
# Parse the want and got strings, and compare the parsings.
want_root = first_node(parseString(want))
got_root = first_node(parseString(got))
return check_element(want_root, got_root)
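# Usage sketch (illustrative): attribute order doesn't affect the result,
# and sibling fragments can be compared thanks to the <root> wrapper:
#
#     compare_xml('<a b="1" c="2"/>', '<a c="2" b="1"/>')  # True
#     compare_xml("<a>x</a>", "<a>y</a>")                  # False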
class CaptureQueriesContext:
"""
Context manager that captures queries executed by the specified connection.
"""
def __init__(self, connection):
self.connection = connection
def __iter__(self):
return iter(self.captured_queries)
def __getitem__(self, index):
return self.captured_queries[index]
def __len__(self):
return len(self.captured_queries)
@property
def captured_queries(self):
return self.connection.queries[self.initial_queries : self.final_queries]
def __enter__(self):
self.force_debug_cursor = self.connection.force_debug_cursor
self.connection.force_debug_cursor = True
# Run any initialization queries if needed so that they won't be
# included as part of the count.
self.connection.ensure_connection()
self.initial_queries = len(self.connection.queries_log)
self.final_queries = None
request_started.disconnect(reset_queries)
return self
def __exit__(self, exc_type, exc_value, traceback):
self.connection.force_debug_cursor = self.force_debug_cursor
request_started.connect(reset_queries)
if exc_type is not None:
return
self.final_queries = len(self.connection.queries_log)
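# Usage sketch (illustrative; "Author" is a hypothetical model). Each
# captured query is a dict with "sql" and "time" keys:
#
#     with CaptureQueriesContext(connection) as ctx:
#         list(Author.objects.all())
#     print(len(ctx.captured_queries), ctx.captured_queries[0]["sql"])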
class ignore_warnings(TestContextDecorator):
def __init__(self, **kwargs):
self.ignore_kwargs = kwargs
if "message" in self.ignore_kwargs or "module" in self.ignore_kwargs:
self.filter_func = warnings.filterwarnings
else:
self.filter_func = warnings.simplefilter
super().__init__()
def enable(self):
self.catch_warnings = warnings.catch_warnings()
self.catch_warnings.__enter__()
self.filter_func("ignore", **self.ignore_kwargs)
def disable(self):
self.catch_warnings.__exit__(*sys.exc_info())
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
requires_tz_support = skipUnless(
TZ_SUPPORT,
"This test relies on the ability to run a program in an arbitrary "
"time zone, but your operating system isn't able to do that.",
)
@contextmanager
def extend_sys_path(*paths):
"""Context manager to temporarily add paths to sys.path."""
_orig_sys_path = sys.path[:]
sys.path.extend(paths)
try:
yield
finally:
sys.path = _orig_sys_path
@contextmanager
def isolate_lru_cache(lru_cache_object):
"""Clear the cache of an LRU cache object on entering and exiting."""
lru_cache_object.cache_clear()
try:
yield
finally:
lru_cache_object.cache_clear()
@contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
Note: This function and the following ``captured_std*`` are copied
from CPython's ``test.support`` module."""
orig_stdout = getattr(sys, stream_name)
setattr(sys, stream_name, StringIO())
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
print("hello")
self.assertEqual(stdout.getvalue(), "hello\n")
"""
return captured_output("stdout")
def captured_stderr():
"""Capture the output of sys.stderr:
with captured_stderr() as stderr:
print("hello", file=sys.stderr)
self.assertEqual(stderr.getvalue(), "hello\n")
"""
return captured_output("stderr")
def captured_stdin():
"""Capture the input to sys.stdin:
with captured_stdin() as stdin:
stdin.write('hello\n')
stdin.seek(0)
# call test code that consumes from sys.stdin
captured = input()
self.assertEqual(captured, "hello")
"""
return captured_output("stdin")
@contextmanager
def freeze_time(t):
"""
Context manager to temporarily freeze time.time(). This temporarily
modifies the time function of the time module. Modules which import the
time function directly (e.g. `from time import time`) won't be affected.
This isn't meant as a public API, but helps reduce some repetitive code in
Django's test suite.
"""
_real_time = time.time
time.time = lambda: t
try:
yield
finally:
time.time = _real_time
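# Usage sketch (illustrative):
#
#     with freeze_time(123.456):
#         assert time.time() == 123.456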
def require_jinja2(test_func):
"""
Decorator to enable a Jinja2 template engine in addition to the regular
Django template engine for a test or skip it if Jinja2 isn't available.
"""
test_func = skipIf(jinja2 is None, "this test requires jinja2")(test_func)
return override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
},
{
"BACKEND": "django.template.backends.jinja2.Jinja2",
"APP_DIRS": True,
"OPTIONS": {"keep_trailing_newline": True},
},
]
)(test_func)
class override_script_prefix(TestContextDecorator):
"""Decorator or context manager to temporary override the script prefix."""
def __init__(self, prefix):
self.prefix = prefix
super().__init__()
def enable(self):
self.old_prefix = get_script_prefix()
set_script_prefix(self.prefix)
def disable(self):
set_script_prefix(self.old_prefix)
class LoggingCaptureMixin:
"""
Capture the output from the 'django' logger and store it on the class's
logger_output attribute.
"""
def setUp(self):
self.logger = logging.getLogger("django")
self.old_stream = self.logger.handlers[0].stream
self.logger_output = StringIO()
self.logger.handlers[0].stream = self.logger_output
def tearDown(self):
self.logger.handlers[0].stream = self.old_stream
class isolate_apps(TestContextDecorator):
"""
Act as either a decorator or a context manager to register models defined
in its wrapped context to an isolated registry.
The list of installed apps the isolated registry should contain must be
passed as arguments.
Two optional keyword arguments can be specified:
`attr_name`: attribute assigned the isolated registry if used as a class
decorator.
`kwarg_name`: keyword argument passing the isolated registry if used as a
function decorator.
"""
def __init__(self, *installed_apps, **kwargs):
self.installed_apps = installed_apps
super().__init__(**kwargs)
def enable(self):
self.old_apps = Options.default_apps
apps = Apps(self.installed_apps)
setattr(Options, "default_apps", apps)
return apps
def disable(self):
setattr(Options, "default_apps", self.old_apps)
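# Usage sketch (illustrative; "myapp" is a hypothetical installed app):
#
#     @isolate_apps("myapp")
#     def test_temporary_model(self):
#         class TemporaryModel(models.Model):
#             class Meta:
#                 app_label = "myapp"
#
# Models defined inside the block register against an isolated Apps
# instance instead of the global registry.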
class TimeKeeper:
def __init__(self):
self.records = collections.defaultdict(list)
@contextmanager
def timed(self, name):
self.records[name]  # Touch the defaultdict so the key exists up front.
start_time = time.perf_counter()
try:
yield
finally:
end_time = time.perf_counter() - start_time
self.records[name].append(end_time)
def print_results(self):
for name, end_times in self.records.items():
for record_time in end_times:
record = "%s took %.3fs" % (name, record_time)
sys.stderr.write(record + os.linesep)
class NullTimeKeeper:
@contextmanager
def timed(self, name):
yield
def print_results(self):
pass
def tag(*tags):
"""Decorator to add tags to a test class or method."""
def decorator(obj):
if hasattr(obj, "tags"):
obj.tags = obj.tags.union(tags)
else:
setattr(obj, "tags", set(tags))
return obj
return decorator
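# Usage sketch (illustrative): tagged tests can then be selected with
# `manage.py test --tag slow` or excluded with `--exclude-tag slow`:
#
#     @tag("slow")
#     def test_full_rebuild(self):
#         ...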
@contextmanager
def register_lookup(field, *lookups, lookup_name=None):
"""
Context manager to temporarily register lookups on a model field using
lookup_name (or the lookup's lookup_name if not provided).
"""
try:
for lookup in lookups:
field.register_lookup(lookup, lookup_name)
yield
finally:
for lookup in lookups:
field._unregister_lookup(lookup, lookup_name)
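# Usage sketch (illustrative; "Author" is a hypothetical model):
#
#     from django.db.models import CharField
#     from django.db.models.functions import Lower
#
#     with register_lookup(CharField, Lower):
#         Author.objects.filter(name__lower="alice")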
import os
import time
import warnings
from asgiref.local import Local
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.db import connections, router
from django.db.utils import ConnectionRouter
from django.dispatch import Signal, receiver
from django.utils import timezone
from django.utils.formats import FORMAT_SETTINGS, reset_format_cache
from django.utils.functional import empty
from django.utils.module_loading import import_string
template_rendered = Signal()
# Most setting_changed receivers are supposed to be added below,
# except for cases where the receiver is related to a contrib app.
# Settings that may not work well when using 'override_settings' (#19031)
COMPLEX_OVERRIDE_SETTINGS = {"DATABASES"}
@receiver(setting_changed)
def clear_cache_handlers(*, setting, **kwargs):
if setting == "CACHES":
from django.core.cache import caches, close_caches
close_caches()
caches._settings = caches.settings = caches.configure_settings(None)
caches._connections = Local()
@receiver(setting_changed)
def update_installed_apps(*, setting, **kwargs):
if setting == "INSTALLED_APPS":
# Rebuild any AppDirectoriesFinder instance.
from django.contrib.staticfiles.finders import get_finder
get_finder.cache_clear()
# Rebuild management commands cache
from django.core.management import get_commands
get_commands.cache_clear()
# Rebuild get_app_template_dirs cache.
from django.template.utils import get_app_template_dirs
get_app_template_dirs.cache_clear()
# Rebuild translations cache.
from django.utils.translation import trans_real
trans_real._translations = {}
@receiver(setting_changed)
def update_connections_time_zone(*, setting, **kwargs):
if setting == "TIME_ZONE":
# Reset process time zone
if hasattr(time, "tzset"):
if kwargs["value"]:
os.environ["TZ"] = kwargs["value"]
else:
os.environ.pop("TZ", None)
time.tzset()
# Reset local time zone cache
timezone.get_default_timezone.cache_clear()
# Reset the database connections' time zone
if setting in {"TIME_ZONE", "USE_TZ"}:
for conn in connections.all(initialized_only=True):
try:
del conn.timezone
except AttributeError:
pass
try:
del conn.timezone_name
except AttributeError:
pass
conn.ensure_timezone()
@receiver(setting_changed)
def clear_routers_cache(*, setting, **kwargs):
if setting == "DATABASE_ROUTERS":
router.routers = ConnectionRouter().routers
@receiver(setting_changed)
def reset_template_engines(*, setting, **kwargs):
if setting in {
"TEMPLATES",
"DEBUG",
"INSTALLED_APPS",
}:
from django.template import engines
try:
del engines.templates
except AttributeError:
pass
engines._templates = None
engines._engines = {}
from django.template.engine import Engine
Engine.get_default.cache_clear()
from django.forms.renderers import get_default_renderer
get_default_renderer.cache_clear()
@receiver(setting_changed)
def storages_changed(*, setting, **kwargs):
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.files.storage import default_storage, storages
if setting in (
"STORAGES",
"STATIC_ROOT",
"STATIC_URL",
):
try:
del storages.backends
except AttributeError:
pass
storages._backends = None
storages._storages = {}
default_storage._wrapped = empty
staticfiles_storage._wrapped = empty
@receiver(setting_changed)
def clear_serializers_cache(*, setting, **kwargs):
if setting == "SERIALIZATION_MODULES":
from django.core import serializers
serializers._serializers = {}
@receiver(setting_changed)
def language_changed(*, setting, **kwargs):
if setting in {"LANGUAGES", "LANGUAGE_CODE", "LOCALE_PATHS"}:
from django.utils.translation import trans_real
trans_real._default = None
trans_real._active = Local()
if setting in {"LANGUAGES", "LOCALE_PATHS"}:
from django.utils.translation import trans_real
trans_real._translations = {}
trans_real.check_for_language.cache_clear()
@receiver(setting_changed)
def localize_settings_changed(*, setting, **kwargs):
if setting in FORMAT_SETTINGS or setting == "USE_THOUSAND_SEPARATOR":
reset_format_cache()
# RemovedInDjango51Warning.
@receiver(setting_changed)
def file_storage_changed(*, setting, **kwargs):
if setting == "DEFAULT_FILE_STORAGE":
from django.conf import DEFAULT_STORAGE_ALIAS
from django.core.files.storage import default_storage, storages
try:
del storages.backends
except AttributeError:
pass
storages._storages[DEFAULT_STORAGE_ALIAS] = import_string(kwargs["value"])()
default_storage._wrapped = empty
@receiver(setting_changed)
def complex_setting_changed(*, enter, setting, **kwargs):
if enter and setting in COMPLEX_OVERRIDE_SETTINGS:
# Considering the current implementation of the signals framework,
# this stacklevel shows the line containing the override_settings call.
warnings.warn(
f"Overriding setting {setting} can lead to unexpected behavior.",
stacklevel=6,
)
@receiver(setting_changed)
def root_urlconf_changed(*, setting, **kwargs):
if setting == "ROOT_URLCONF":
from django.urls import clear_url_caches, set_urlconf
clear_url_caches()
set_urlconf(None)
@receiver(setting_changed)
def static_storage_changed(*, setting, **kwargs):
if setting in {
"STATICFILES_STORAGE",
"STATIC_ROOT",
"STATIC_URL",
}:
from django.contrib.staticfiles.storage import staticfiles_storage
staticfiles_storage._wrapped = empty
# RemovedInDjango51Warning.
if setting == "STATICFILES_STORAGE":
from django.conf import STATICFILES_STORAGE_ALIAS
from django.core.files.storage import storages
try:
del storages.backends
except AttributeError:
pass
storages._storages[STATICFILES_STORAGE_ALIAS] = import_string(kwargs["value"])()
@receiver(setting_changed)
def static_finders_changed(*, setting, **kwargs):
if setting in {
"STATICFILES_DIRS",
"STATIC_ROOT",
}:
from django.contrib.staticfiles.finders import get_finder
get_finder.cache_clear()
@receiver(setting_changed)
def auth_password_validators_changed(*, setting, **kwargs):
if setting == "AUTH_PASSWORD_VALIDATORS":
from django.contrib.auth.password_validation import (
get_default_password_validators,
)
get_default_password_validators.cache_clear()
@receiver(setting_changed)
def user_model_swapped(*, setting, **kwargs):
if setting == "AUTH_USER_MODEL":
apps.clear_cache()
try:
from django.contrib.auth import get_user_model
UserModel = get_user_model()
except ImproperlyConfigured:
# Some tests set an invalid AUTH_USER_MODEL.
pass
else:
from django.contrib.auth import backends
backends.UserModel = UserModel
from django.contrib.auth import forms
forms.UserModel = UserModel
from django.contrib.auth.handlers import modwsgi
modwsgi.UserModel = UserModel
from django.contrib.auth.management.commands import changepassword
changepassword.UserModel = UserModel
from django.contrib.auth import views
views.UserModel = UserModel
import functools
import sys
import threading
import warnings
from collections import Counter, defaultdict
from functools import partial
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from .config import AppConfig
class Apps:
"""
A registry that stores the configuration of installed applications.
It also keeps track of models, e.g. to provide reverse relations.
"""
def __init__(self, installed_apps=()):
# installed_apps is set to None when creating the main registry
# because it cannot be populated at that point. Other registries must
# provide a list of installed apps and are populated immediately.
if installed_apps is None and hasattr(sys.modules[__name__], "apps"):
raise RuntimeError("You must supply an installed_apps argument.")
# Mapping of app labels => model names => model classes. Every time a
# model is imported, ModelBase.__new__ calls apps.register_model which
# creates an entry in all_models. All imported models are registered,
# regardless of whether they're defined in an installed application
# and whether the registry has been populated. Since it isn't possible
# to reimport a module safely (it could reexecute initialization code)
# all_models is never overridden or reset.
self.all_models = defaultdict(dict)
# Mapping of labels to AppConfig instances for installed apps.
self.app_configs = {}
# Stack of app_configs. Used to store the current state in
# set_available_apps and set_installed_apps.
self.stored_app_configs = []
# Whether the registry is populated.
self.apps_ready = self.models_ready = self.ready = False
# For the autoreloader.
self.ready_event = threading.Event()
# Lock for thread-safe population.
self._lock = threading.RLock()
self.loading = False
# Maps ("app_label", "modelname") tuples to lists of functions to be
# called when the corresponding model is ready. Used by this class's
# `lazy_model_operation()` and `do_pending_operations()` methods.
self._pending_operations = defaultdict(list)
# Populate apps and models, unless it's the main registry.
if installed_apps is not None:
self.populate(installed_apps)
def populate(self, installed_apps=None):
"""
Load application configurations and models.
Import each application module and then each model module.
It is thread-safe and idempotent, but not reentrant.
"""
if self.ready:
return
# populate() might be called by two threads in parallel on servers
# that create threads before initializing the WSGI callable.
with self._lock:
if self.ready:
return
# An RLock prevents other threads from entering this section. The
# compare and set operation below is atomic.
if self.loading:
# Prevent reentrant calls to avoid running AppConfig.ready()
# methods twice.
raise RuntimeError("populate() isn't reentrant")
self.loading = True
# Phase 1: initialize app configs and import app modules.
for entry in installed_apps:
if isinstance(entry, AppConfig):
app_config = entry
else:
app_config = AppConfig.create(entry)
if app_config.label in self.app_configs:
raise ImproperlyConfigured(
"Application labels aren't unique, "
"duplicates: %s" % app_config.label
)
self.app_configs[app_config.label] = app_config
app_config.apps = self
# Check for duplicate app names.
counts = Counter(
app_config.name for app_config in self.app_configs.values()
)
duplicates = [name for name, count in counts.most_common() if count > 1]
if duplicates:
raise ImproperlyConfigured(
"Application names aren't unique, "
"duplicates: %s" % ", ".join(duplicates)
)
self.apps_ready = True
# Phase 2: import models modules.
for app_config in self.app_configs.values():
app_config.import_models()
self.clear_cache()
self.models_ready = True
# Phase 3: run ready() methods of app configs.
for app_config in self.get_app_configs():
app_config.ready()
self.ready = True
self.ready_event.set()
def check_apps_ready(self):
"""Raise an exception if all apps haven't been imported yet."""
if not self.apps_ready:
from django.conf import settings
# If "not ready" is due to unconfigured settings, accessing
# INSTALLED_APPS raises a more helpful ImproperlyConfigured
# exception.
settings.INSTALLED_APPS
raise AppRegistryNotReady("Apps aren't loaded yet.")
def check_models_ready(self):
"""Raise an exception if all models haven't been imported yet."""
if not self.models_ready:
raise AppRegistryNotReady("Models aren't loaded yet.")
def get_app_configs(self):
"""Import applications and return an iterable of app configs."""
self.check_apps_ready()
return self.app_configs.values()
def get_app_config(self, app_label):
"""
Import applications and return an app config for the given label.
Raise LookupError if no application exists with this label.
"""
self.check_apps_ready()
try:
return self.app_configs[app_label]
except KeyError:
message = "No installed app with label '%s'." % app_label
for app_config in self.get_app_configs():
if app_config.name == app_label:
message += " Did you mean '%s'?" % app_config.label
break
raise LookupError(message)
# This method is performance-critical at least for Django's test suite.
@functools.cache
def get_models(self, include_auto_created=False, include_swapped=False):
"""
Return a list of all installed models.
By default, the following models aren't included:
- auto-created models for many-to-many relations without
an explicit intermediate table,
- models that have been swapped out.
Set the corresponding keyword argument to True to include such models.
"""
self.check_models_ready()
result = []
for app_config in self.app_configs.values():
result.extend(app_config.get_models(include_auto_created, include_swapped))
return result
def get_model(self, app_label, model_name=None, require_ready=True):
"""
Return the model matching the given app_label and model_name.
As a shortcut, app_label may be in the form <app_label>.<model_name>.
model_name is case-insensitive.
Raise LookupError if no application exists with this label, or no
model exists with this name in the application. Raise ValueError if
called with a single argument that doesn't contain exactly one dot.
"""
if require_ready:
self.check_models_ready()
else:
self.check_apps_ready()
if model_name is None:
app_label, model_name = app_label.split(".")
app_config = self.get_app_config(app_label)
if not require_ready and app_config.models is None:
app_config.import_models()
return app_config.get_model(model_name, require_ready=require_ready)
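# Usage sketch (illustrative):
#
#     from django.apps import apps
#
#     User = apps.get_model("auth", "User")
#     User = apps.get_model("auth.User")  # Shortcut form.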
def register_model(self, app_label, model):
# Since this method is called when models are imported, it cannot
# perform imports because of the risk of import loops. It mustn't
# call get_app_config().
model_name = model._meta.model_name
app_models = self.all_models[app_label]
if model_name in app_models:
if (
model.__name__ == app_models[model_name].__name__
and model.__module__ == app_models[model_name].__module__
):
warnings.warn(
"Model '%s.%s' was already registered. Reloading models is not "
"advised as it can lead to inconsistencies, most notably with "
"related models." % (app_label, model_name),
RuntimeWarning,
stacklevel=2,
)
else:
raise RuntimeError(
"Conflicting '%s' models in application '%s': %s and %s."
% (model_name, app_label, app_models[model_name], model)
)
app_models[model_name] = model
self.do_pending_operations(model)
self.clear_cache()
def is_installed(self, app_name):
"""
Check whether an application with this name exists in the registry.
app_name is the full name of the app e.g. 'django.contrib.admin'.
"""
self.check_apps_ready()
return any(ac.name == app_name for ac in self.app_configs.values())
def get_containing_app_config(self, object_name):
"""
Look for an app config containing a given object.
object_name is the dotted Python path to the object.
Return the app config for the inner application in case of nesting.
Return None if the object isn't in any registered app config.
"""
self.check_apps_ready()
candidates = []
for app_config in self.app_configs.values():
if object_name.startswith(app_config.name):
subpath = object_name.removeprefix(app_config.name)
if subpath == "" or subpath[0] == ".":
candidates.append(app_config)
if candidates:
return sorted(candidates, key=lambda ac: -len(ac.name))[0]
def get_registered_model(self, app_label, model_name):
"""
Similar to get_model(), but doesn't require that an app exists with
the given app_label.
It's safe to call this method at import time, even while the registry
is being populated.
"""
model = self.all_models[app_label].get(model_name.lower())
if model is None:
raise LookupError("Model '%s.%s' not registered." % (app_label, model_name))
return model
@functools.cache
def get_swappable_settings_name(self, to_string):
"""
For a given model string (e.g. "auth.User"), return the name of the
corresponding settings name if it refers to a swappable model. If the
referred model is not swappable, return None.
This method is decorated with @functools.cache because it's performance
critical when it comes to migrations. Since the swappable settings don't
change after Django has loaded the settings, there is no reason to get
the respective settings attribute over and over again.
"""
to_string = to_string.lower()
for model in self.get_models(include_swapped=True):
swapped = model._meta.swapped
# Is this model swapped out for the model given by to_string?
if swapped and swapped.lower() == to_string:
return model._meta.swappable
# Is this model swappable and the one given by to_string?
if model._meta.swappable and model._meta.label_lower == to_string:
return model._meta.swappable
return None
def set_available_apps(self, available):
"""
Restrict the set of installed apps used by get_app_config[s].
available must be an iterable of application names.
set_available_apps() must be balanced with unset_available_apps().
Primarily used for performance optimization in TransactionTestCase.
This method is safe in the sense that it doesn't trigger any imports.
"""
available = set(available)
installed = {app_config.name for app_config in self.get_app_configs()}
if not available.issubset(installed):
raise ValueError(
"Available apps isn't a subset of installed apps, extra apps: %s"
% ", ".join(available - installed)
)
self.stored_app_configs.append(self.app_configs)
self.app_configs = {
label: app_config
for label, app_config in self.app_configs.items()
if app_config.name in available
}
self.clear_cache()
def unset_available_apps(self):
"""Cancel a previous call to set_available_apps()."""
self.app_configs = self.stored_app_configs.pop()
self.clear_cache()
def set_installed_apps(self, installed):
"""
Enable a different set of installed apps for get_app_config[s].
installed must be an iterable in the same format as INSTALLED_APPS.
set_installed_apps() must be balanced with unset_installed_apps(),
even if it exits with an exception.
Primarily used as a receiver of the setting_changed signal in tests.
This method may trigger new imports, which may add new models to the
registry of all imported models. They will stay in the registry even
after unset_installed_apps(). Since it isn't possible to replay
imports safely (e.g. that could lead to registering listeners twice),
models are registered when they're imported and never removed.
"""
if not self.ready:
raise AppRegistryNotReady("App registry isn't ready yet.")
self.stored_app_configs.append(self.app_configs)
self.app_configs = {}
self.apps_ready = self.models_ready = self.loading = self.ready = False
self.clear_cache()
self.populate(installed)
def unset_installed_apps(self):
"""Cancel a previous call to set_installed_apps()."""
self.app_configs = self.stored_app_configs.pop()
self.apps_ready = self.models_ready = self.ready = True
self.clear_cache()
def clear_cache(self):
"""
Clear all internal caches, for methods that alter the app registry.
This is mostly used in tests.
"""
# Call expire cache on each model. This will purge
# the relation tree and the fields cache.
self.get_models.cache_clear()
if self.ready:
# Circumvent self.get_models() so the cache isn't refilled.
# In particular, this prevents an empty value from being cached while cloning.
for app_config in self.app_configs.values():
for model in app_config.get_models(include_auto_created=True):
model._meta._expire_cache()
def lazy_model_operation(self, function, *model_keys):
"""
Take a function and a number of ("app_label", "modelname") tuples, and
when all the corresponding models have been imported and registered,
call the function with the model classes as its arguments.
The function passed to this method must accept exactly n models as
arguments, where n=len(model_keys).
"""
# Base case: no arguments, just execute the function.
if not model_keys:
function()
# Recursive case: take the head of model_keys, wait for the
# corresponding model class to be imported and registered, then apply
# that argument to the supplied function. Pass the resulting partial
# to lazy_model_operation() along with the remaining model args and
# repeat until all models are loaded and all arguments are applied.
else:
next_model, *more_models = model_keys
# This will be executed after the class corresponding to next_model
# has been imported and registered. The `func` attribute provides
# duck-type compatibility with partials.
def apply_next_model(model):
next_function = partial(apply_next_model.func, model)
self.lazy_model_operation(next_function, *more_models)
apply_next_model.func = function
# If the model has already been imported and registered, partially
# apply it to the function now. If not, add it to the list of
# pending operations for the model, where it will be executed with
# the model class as its sole argument once the model is ready.
try:
model_class = self.get_registered_model(*next_model)
except LookupError:
self._pending_operations[next_model].append(apply_next_model)
else:
apply_next_model(model_class)
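# Usage sketch (illustrative; the "news" app and its models are hypothetical):
# defer work until both models have been imported and registered.
#
#     from django.apps import apps
#
#     def connect(article_model, author_model):
#         ...  # called once with both model classes
#
#     apps.lazy_model_operation(connect, ("news", "article"), ("news", "author"))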
def do_pending_operations(self, model):
"""
Take a newly-prepared model and pass it to each function waiting for
it. This is called at the very end of Apps.register_model().
"""
key = model._meta.app_label, model._meta.model_name
for function in self._pending_operations.pop(key, []):
function(model)
apps = Apps(installed_apps=None)
|
9e6a4048a755a2320ddc9f2213e6701111db76451abba3db84e0ca0ea45e3a46 | """
Settings and configuration for Django.
Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see global_settings.py
for a list of all possible variables.
"""
import importlib
import os
import time
import traceback
import warnings
from pathlib import Path
import django
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.functional import LazyObject, empty
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
DEFAULT_STORAGE_ALIAS = "default"
STATICFILES_STORAGE_ALIAS = "staticfiles"
DEFAULT_FILE_STORAGE_DEPRECATED_MSG = (
"The DEFAULT_FILE_STORAGE setting is deprecated. Use STORAGES instead."
)
STATICFILES_STORAGE_DEPRECATED_MSG = (
"The STATICFILES_STORAGE setting is deprecated. Use STORAGES instead."
)
class SettingsReference(str):
"""
String subclass which references a current settings value. It's treated as
the value in memory but serializes to a settings.NAME attribute reference.
"""
def __new__(cls, value, setting_name):
return str.__new__(cls, value)
def __init__(self, value, setting_name):
self.setting_name = setting_name
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
is used the first time settings are needed, if the user hasn't
configured settings manually.
"""
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
if not settings_module:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
% (desc, ENVIRONMENT_VARIABLE)
)
self._wrapped = Settings(settings_module)
def __repr__(self):
# Hardcode the class name as otherwise it yields 'Settings'.
if self._wrapped is empty:
return "<LazySettings [Unevaluated]>"
return '<LazySettings "%(settings_module)s">' % {
"settings_module": self._wrapped.SETTINGS_MODULE,
}
def __getattr__(self, name):
"""Return the value of a setting and cache it in self.__dict__."""
if (_wrapped := self._wrapped) is empty:
self._setup(name)
_wrapped = self._wrapped
val = getattr(_wrapped, name)
# Special case some settings which require further modification.
# This is done here for performance reasons so the modified value is cached.
if name in {"MEDIA_URL", "STATIC_URL"} and val is not None:
val = self._add_script_prefix(val)
elif name == "SECRET_KEY" and not val:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
self.__dict__[name] = val
return val
def __setattr__(self, name, value):
"""
Set the value of a setting. Clear all cached values if _wrapped changes
(@override_settings does this) or clear single values when set.
"""
if name == "_wrapped":
self.__dict__.clear()
else:
self.__dict__.pop(name, None)
super().__setattr__(name, value)
def __delattr__(self, name):
"""Delete a setting and clear it from cache if needed."""
super().__delattr__(name)
self.__dict__.pop(name, None)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError("Settings already configured.")
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
if not name.isupper():
raise TypeError("Setting %r must be uppercase." % name)
setattr(holder, name, value)
self._wrapped = holder
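# Usage sketch (illustrative, for standalone scripts): configure settings
# manually before anything reads them, then initialize the app registry.
#
#     from django.conf import settings
#
#     settings.configure(DEBUG=True, INSTALLED_APPS=[], USE_TZ=True)
#     import django
#     django.setup()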
@staticmethod
def _add_script_prefix(value):
"""
Add SCRIPT_NAME prefix to relative paths.
Useful when the app is being served at a subpath and manually prefixing
subpath to STATIC_URL and MEDIA_URL in settings is inconvenient.
"""
# Don't apply prefix to absolute paths and URLs.
if value.startswith(("http://", "https://", "/")):
return value
from django.urls import get_script_prefix
return "%s%s" % (get_script_prefix(), value)
@property
def configured(self):
"""Return True if the settings have already been configured."""
return self._wrapped is not empty
def _show_deprecation_warning(self, message, category):
stack = traceback.extract_stack()
# Show a warning if the setting is used outside of Django.
# Stack index: -1 this line, -2 the property, -3 the
# LazyObject __getattribute__(), -4 the caller.
filename, _, _, _ = stack[-4]
if not filename.startswith(os.path.dirname(django.__file__)):
warnings.warn(message, category, stacklevel=2)
# RemovedInDjango51Warning.
@property
def DEFAULT_FILE_STORAGE(self):
self._show_deprecation_warning(
DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning
)
return self.__getattr__("DEFAULT_FILE_STORAGE")
# RemovedInDjango51Warning.
@property
def STATICFILES_STORAGE(self):
self._show_deprecation_warning(
STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning
)
return self.__getattr__("STATICFILES_STORAGE")
class Settings:
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting.isupper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
mod = importlib.import_module(self.SETTINGS_MODULE)
tuple_settings = (
"ALLOWED_HOSTS",
"INSTALLED_APPS",
"TEMPLATE_DIRS",
"LOCALE_PATHS",
"SECRET_KEY_FALLBACKS",
)
self._explicit_settings = set()
for setting in dir(mod):
if setting.isupper():
setting_value = getattr(mod, setting)
if setting in tuple_settings and not isinstance(
setting_value, (list, tuple)
):
raise ImproperlyConfigured(
"The %s setting must be a list or a tuple." % setting
)
setattr(self, setting, setting_value)
self._explicit_settings.add(setting)
if hasattr(time, "tzset") and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = Path("/usr/share/zoneinfo")
zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split("/"))
if zoneinfo_root.exists() and not zone_info_file.exists():
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ["TZ"] = self.TIME_ZONE
time.tzset()
if self.is_overridden("DEFAULT_FILE_STORAGE"):
if self.is_overridden("STORAGES"):
raise ImproperlyConfigured(
"DEFAULT_FILE_STORAGE/STORAGES are mutually exclusive."
)
warnings.warn(DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
if self.is_overridden("STATICFILES_STORAGE"):
if self.is_overridden("STORAGES"):
raise ImproperlyConfigured(
"STATICFILES_STORAGE/STORAGES are mutually exclusive."
)
warnings.warn(STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
def is_overridden(self, setting):
return setting in self._explicit_settings
def __repr__(self):
return '<%(cls)s "%(settings_module)s">' % {
"cls": self.__class__.__name__,
"settings_module": self.SETTINGS_MODULE,
}
class UserSettingsHolder:
"""Holder for user configured settings."""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__["_deleted"] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if not name.isupper() or name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
if name == "DEFAULT_FILE_STORAGE":
self.STORAGES[DEFAULT_STORAGE_ALIAS] = {
"BACKEND": self.DEFAULT_FILE_STORAGE
}
warnings.warn(DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
if name == "STATICFILES_STORAGE":
self.STORAGES[STATICFILES_STORAGE_ALIAS] = {
"BACKEND": self.STATICFILES_STORAGE
}
warnings.warn(STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
super().__setattr__(name, value)
# RemovedInDjango51Warning.
if name == "STORAGES":
self.STORAGES.setdefault(
DEFAULT_STORAGE_ALIAS,
{"BACKEND": "django.core.files.storage.FileSystemStorage"},
)
self.STORAGES.setdefault(
STATICFILES_STORAGE_ALIAS,
{"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage"},
)
def __delattr__(self, name):
self._deleted.add(name)
if hasattr(self, name):
super().__delattr__(name)
def __dir__(self):
return sorted(
s
for s in [*self.__dict__, *dir(self.default_settings)]
if s not in self._deleted
)
def is_overridden(self, setting):
deleted = setting in self._deleted
set_locally = setting in self.__dict__
set_on_default = getattr(
self.default_settings, "is_overridden", lambda s: False
)(setting)
return deleted or set_locally or set_on_default
def __repr__(self):
return "<%(cls)s>" % {
"cls": self.__class__.__name__,
}
settings = LazySettings()
|
baf6f490aa204efd96fdfa9df9992bbe39362db176be166eb0124d17fb177ce0 | """
Default Django settings. Override these with settings in the module pointed to
by the DJANGO_SETTINGS_MODULE environment variable.
"""
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
def gettext_noop(s):
return s
####################
# CORE #
####################
DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# People who get code error notifications. In the format
# [('Full Name', '[email protected]'), ('Full Name', '[email protected]')]
ADMINS = []
# List of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = []
# Hosts/domain names that are valid for this site.
# "*" matches anything, ".example.com" matches example.com and all subdomains
ALLOWED_HOSTS = []
# Local time zone for this installation. All choices can be found here:
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = "America/Chicago"
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-us"
# Languages we provide translations for, out of the box.
LANGUAGES = [
("af", gettext_noop("Afrikaans")),
("ar", gettext_noop("Arabic")),
("ar-dz", gettext_noop("Algerian Arabic")),
("ast", gettext_noop("Asturian")),
("az", gettext_noop("Azerbaijani")),
("bg", gettext_noop("Bulgarian")),
("be", gettext_noop("Belarusian")),
("bn", gettext_noop("Bengali")),
("br", gettext_noop("Breton")),
("bs", gettext_noop("Bosnian")),
("ca", gettext_noop("Catalan")),
("ckb", gettext_noop("Central Kurdish (Sorani)")),
("cs", gettext_noop("Czech")),
("cy", gettext_noop("Welsh")),
("da", gettext_noop("Danish")),
("de", gettext_noop("German")),
("dsb", gettext_noop("Lower Sorbian")),
("el", gettext_noop("Greek")),
("en", gettext_noop("English")),
("en-au", gettext_noop("Australian English")),
("en-gb", gettext_noop("British English")),
("eo", gettext_noop("Esperanto")),
("es", gettext_noop("Spanish")),
("es-ar", gettext_noop("Argentinian Spanish")),
("es-co", gettext_noop("Colombian Spanish")),
("es-mx", gettext_noop("Mexican Spanish")),
("es-ni", gettext_noop("Nicaraguan Spanish")),
("es-ve", gettext_noop("Venezuelan Spanish")),
("et", gettext_noop("Estonian")),
("eu", gettext_noop("Basque")),
("fa", gettext_noop("Persian")),
("fi", gettext_noop("Finnish")),
("fr", gettext_noop("French")),
("fy", gettext_noop("Frisian")),
("ga", gettext_noop("Irish")),
("gd", gettext_noop("Scottish Gaelic")),
("gl", gettext_noop("Galician")),
("he", gettext_noop("Hebrew")),
("hi", gettext_noop("Hindi")),
("hr", gettext_noop("Croatian")),
("hsb", gettext_noop("Upper Sorbian")),
("hu", gettext_noop("Hungarian")),
("hy", gettext_noop("Armenian")),
("ia", gettext_noop("Interlingua")),
("id", gettext_noop("Indonesian")),
("ig", gettext_noop("Igbo")),
("io", gettext_noop("Ido")),
("is", gettext_noop("Icelandic")),
("it", gettext_noop("Italian")),
("ja", gettext_noop("Japanese")),
("ka", gettext_noop("Georgian")),
("kab", gettext_noop("Kabyle")),
("kk", gettext_noop("Kazakh")),
("km", gettext_noop("Khmer")),
("kn", gettext_noop("Kannada")),
("ko", gettext_noop("Korean")),
("ky", gettext_noop("Kyrgyz")),
("lb", gettext_noop("Luxembourgish")),
("lt", gettext_noop("Lithuanian")),
("lv", gettext_noop("Latvian")),
("mk", gettext_noop("Macedonian")),
("ml", gettext_noop("Malayalam")),
("mn", gettext_noop("Mongolian")),
("mr", gettext_noop("Marathi")),
("ms", gettext_noop("Malay")),
("my", gettext_noop("Burmese")),
("nb", gettext_noop("Norwegian Bokmål")),
("ne", gettext_noop("Nepali")),
("nl", gettext_noop("Dutch")),
("nn", gettext_noop("Norwegian Nynorsk")),
("os", gettext_noop("Ossetic")),
("pa", gettext_noop("Punjabi")),
("pl", gettext_noop("Polish")),
("pt", gettext_noop("Portuguese")),
("pt-br", gettext_noop("Brazilian Portuguese")),
("ro", gettext_noop("Romanian")),
("ru", gettext_noop("Russian")),
("sk", gettext_noop("Slovak")),
("sl", gettext_noop("Slovenian")),
("sq", gettext_noop("Albanian")),
("sr", gettext_noop("Serbian")),
("sr-latn", gettext_noop("Serbian Latin")),
("sv", gettext_noop("Swedish")),
("sw", gettext_noop("Swahili")),
("ta", gettext_noop("Tamil")),
("te", gettext_noop("Telugu")),
("tg", gettext_noop("Tajik")),
("th", gettext_noop("Thai")),
("tk", gettext_noop("Turkmen")),
("tr", gettext_noop("Turkish")),
("tt", gettext_noop("Tatar")),
("udm", gettext_noop("Udmurt")),
("uk", gettext_noop("Ukrainian")),
("ur", gettext_noop("Urdu")),
("uz", gettext_noop("Uzbek")),
("vi", gettext_noop("Vietnamese")),
("zh-hans", gettext_noop("Simplified Chinese")),
("zh-hant", gettext_noop("Traditional Chinese")),
]
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ["he", "ar", "ar-dz", "ckb", "fa", "ur"]
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = []
# Settings for language cookie
LANGUAGE_COOKIE_NAME = "django_language"
LANGUAGE_COOKIE_AGE = None
LANGUAGE_COOKIE_DOMAIN = None
LANGUAGE_COOKIE_PATH = "/"
LANGUAGE_COOKIE_SECURE = False
LANGUAGE_COOKIE_HTTPONLY = False
LANGUAGE_COOKIE_SAMESITE = None
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various emails.
MANAGERS = ADMINS
# Default charset to use for all HttpResponse objects, if a MIME type isn't
# manually specified. It's used to construct the Content-Type header.
DEFAULT_CHARSET = "utf-8"
# Email address that error messages come from.
SERVER_EMAIL = "root@localhost"
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
# Host for sending email.
EMAIL_HOST = "localhost"
# Port for sending email.
EMAIL_PORT = 25
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
EMAIL_USE_LOCALTIME = False
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_USE_TLS = False
EMAIL_USE_SSL = False
EMAIL_SSL_CERTFILE = None
EMAIL_SSL_KEYFILE = None
EMAIL_TIMEOUT = None
# List of strings representing installed apps.
INSTALLED_APPS = []
TEMPLATES = []
# Default form rendering class.
FORM_RENDERER = "django.forms.renderers.DjangoTemplates"
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = "webmaster@localhost"
# Subject-line prefix for email messages sent with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = "[Django] "
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = [
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search'),
# ]
DISALLOWED_USER_AGENTS = []
ABSOLUTE_URL_OVERRIDES = {}
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = [
# re.compile(r'^/apple-touch-icon.*\.png$'),
# re.compile(r'^/favicon.ico$'),
# re.compile(r'^/robots.txt$'),
# re.compile(r'^/phpmyadmin/'),
# re.compile(r'\.(cgi|php|pl)$'),
# ]
IGNORABLE_404_URLS = []
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ""
# List of secret keys used to verify the validity of signatures. This allows
# secret key rotation.
SECRET_KEY_FALLBACKS = []
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
STORAGES = {
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
},
}
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ""
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ""
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = None
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = [
"django.core.files.uploadhandler.MemoryFileUploadHandler",
"django.core.files.uploadhandler.TemporaryFileUploadHandler",
]
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum size in bytes of request data (excluding file uploads) that will be
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum number of GET/POST parameters that will be read before a
# SuspiciousOperation (TooManyFieldsSent) is raised.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
# Directory in which streamed file uploads will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see
# https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_PERMISSIONS = 0o644
# The numeric mode to assign to newly-created directories, when uploading files.
# The value should be a mode as you'd pass to os.chmod;
# see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where the user will place custom format definitions.
# The directory this setting points to should contain subdirectories named
# after the locales, each containing a formats.py file
# (e.g. "myproject.locale" for myproject/locale/en/formats.py).
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "N j, Y"
# Default formatting for datetime objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = "N j, Y, P"
# Default formatting for time objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = "P"
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = "F Y"
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = "F j"
# Default short formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = "m/d/Y"
# Default short formatting for datetime objects.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = "m/d/Y P"
# Default formats to be used when parsing dates from input boxes, in order.
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones used to display dates.
DATE_INPUT_FORMATS = [
"%Y-%m-%d", # '2006-10-25'
"%m/%d/%Y", # '10/25/2006'
"%m/%d/%y", # '10/25/06'
"%b %d %Y", # 'Oct 25 2006'
"%b %d, %Y", # 'Oct 25, 2006'
"%d %b %Y", # '25 Oct 2006'
"%d %b, %Y", # '25 Oct, 2006'
"%B %d %Y", # 'October 25 2006'
"%B %d, %Y", # 'October 25, 2006'
"%d %B %Y", # '25 October 2006'
"%d %B, %Y", # '25 October, 2006'
]
# Default formats to be used when parsing times from input boxes, in order.
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones used to display dates.
TIME_INPUT_FORMATS = [
"%H:%M:%S", # '14:30:59'
"%H:%M:%S.%f", # '14:30:59.000200'
"%H:%M", # '14:30'
]
# Default formats to be used when parsing dates and times from input boxes,
# in order.
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones used to display dates.
DATETIME_INPUT_FORMATS = [
"%Y-%m-%d %H:%M:%S", # '2006-10-25 14:30:59'
"%Y-%m-%d %H:%M:%S.%f", # '2006-10-25 14:30:59.000200'
"%Y-%m-%d %H:%M", # '2006-10-25 14:30'
"%m/%d/%Y %H:%M:%S", # '10/25/2006 14:30:59'
"%m/%d/%Y %H:%M:%S.%f", # '10/25/2006 14:30:59.000200'
"%m/%d/%Y %H:%M", # '10/25/2006 14:30'
"%m/%d/%y %H:%M:%S", # '10/25/06 14:30:59'
"%m/%d/%y %H:%M:%S.%f", # '10/25/06 14:30:59.000200'
"%m/%d/%y %H:%M", # '10/25/06 14:30'
]
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = "."
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be together, when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ","
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ""
DEFAULT_INDEX_TABLESPACE = ""
# Default primary key field type.
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
# Default X-Frame-Options header value
X_FRAME_OPTIONS = "DENY"
USE_X_FORWARDED_HOST = False
USE_X_FORWARDED_PORT = False
# The Python dotted path to the WSGI application that Django's internal server
# (runserver) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
SECURE_PROXY_SSL_HEADER = None
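# Example (a common value behind a TLS-terminating proxy; safe only if the
# proxy strips the client-supplied header):
# SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")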
##############
# MIDDLEWARE #
##############
# List of middleware to use. Order is important; in the request phase, these
# middleware will be applied in the order given, and in the response
# phase the middleware will be applied in reverse order.
MIDDLEWARE = []
############
# SESSIONS #
############
# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = "default"
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = "sessionid"
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like "example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = "/"
# Whether to use the HttpOnly flag.
SESSION_COOKIE_HTTPONLY = True
# Whether to set the flag restricting cookie leaks on cross-site requests.
# This can be 'Lax', 'Strict', 'None', or False to disable the flag.
SESSION_COOKIE_SAMESITE = "Lax"
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = "django.contrib.sessions.backends.db"
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = "django.contrib.sessions.serializers.JSONSerializer"
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
}
}
CACHE_MIDDLEWARE_KEY_PREFIX = ""
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = "default"
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = "auth.User"
AUTHENTICATION_BACKENDS = ["django.contrib.auth.backends.ModelBackend"]
LOGIN_URL = "/accounts/login/"
LOGIN_REDIRECT_URL = "/accounts/profile/"
LOGOUT_REDIRECT_URL = None
# The number of seconds a password reset link is valid for (default: 3 days).
PASSWORD_RESET_TIMEOUT = 60 * 60 * 24 * 3
# The first hasher in this list is the preferred algorithm. Any
# password using a different algorithm will be converted automatically
# upon login.
PASSWORD_HASHERS = [
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.Argon2PasswordHasher",
"django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
"django.contrib.auth.hashers.ScryptPasswordHasher",
]
AUTH_PASSWORD_VALIDATORS = []
###########
# SIGNING #
###########
SIGNING_BACKEND = "django.core.signing.TimestampSigner"
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = "django.views.csrf.csrf_failure"
# Settings for CSRF cookie.
CSRF_COOKIE_NAME = "csrftoken"
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = "/"
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
CSRF_COOKIE_SAMESITE = "Lax"
CSRF_HEADER_NAME = "HTTP_X_CSRFTOKEN"
CSRF_TRUSTED_ORIGINS = []
CSRF_USE_SESSIONS = False
############
# MESSAGES #
############
# Class to use as messages backend
MESSAGE_STORAGE = "django.contrib.messages.storage.fallback.FallbackStorage"
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = "logging.config.dictConfig"
# Custom logging configuration.
LOGGING = {}
# Default exception reporter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER = "django.views.debug.ExceptionReporter"
# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = "django.views.debug.SafeExceptionReporterFilter"
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# Apps that don't need to be serialized at test database creation time
# (only apps with migrations are serialized to start with)
TEST_NON_SERIALIZED_APPS = []
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = []
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = []
# The default file storage backend used during the build process
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
##############
# MIGRATIONS #
##############
# Migration module overrides for apps, by app label.
MIGRATION_MODULES = {}
#################
# SYSTEM CHECKS #
#################
# List of all issues generated by system checks that should be silenced. Light
# issues like warnings, infos, or debugs will not generate a message. Silencing
# serious issues like errors and criticals does not hide the message, but
# Django will not stop you from e.g. running the server.
SILENCED_SYSTEM_CHECKS = []
#######################
# SECURITY MIDDLEWARE #
#######################
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_CROSS_ORIGIN_OPENER_POLICY = "same-origin"
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
SECURE_HSTS_PRELOAD = False
SECURE_HSTS_SECONDS = 0
SECURE_REDIRECT_EXEMPT = []
SECURE_REFERRER_POLICY = "same-origin"
SECURE_SSL_HOST = None
SECURE_SSL_REDIRECT = False
|
fb8aeda830408f89daeb70eb1305191da410a17c1ed62170d1a0ba4634cbc653 | "Functions that help with dynamically creating decorators for views."
from functools import partial, update_wrapper, wraps
class classonlymethod(classmethod):
def __get__(self, instance, cls=None):
if instance is not None:
raise AttributeError(
"This method is available only on the class, not on instances."
)
return super().__get__(instance, cls)
def _update_method_wrapper(_wrapper, decorator):
# _multi_decorate()'s bound_method isn't available in this scope. Cheat by
# using it on a dummy function.
@decorator
def dummy(*args, **kwargs):
pass
update_wrapper(_wrapper, dummy)
def _multi_decorate(decorators, method):
"""
Decorate `method` with one or more function decorators. `decorators` can be
a single decorator or an iterable of decorators.
"""
if hasattr(decorators, "__iter__"):
# Apply a list/tuple of decorators if 'decorators' is one. Decorator
# functions are applied so that the call order is the same as the
# order in which they appear in the iterable.
decorators = decorators[::-1]
else:
decorators = [decorators]
def _wrapper(self, *args, **kwargs):
# bound_method has the signature that 'decorator' expects, i.e. no
# 'self' argument, but it's a closure over self so it can call
# 'method'. Also, wrap method.__get__() in a function because new
# attributes can't be set on bound method objects, only on functions.
bound_method = wraps(method)(partial(method.__get__(self, type(self))))
for dec in decorators:
bound_method = dec(bound_method)
return bound_method(*args, **kwargs)
# Copy any attributes that a decorator adds to the function it decorates.
for dec in decorators:
_update_method_wrapper(_wrapper, dec)
# Preserve any existing attributes of 'method', including the name.
update_wrapper(_wrapper, method)
return _wrapper
def method_decorator(decorator, name=""):
"""
Convert a function decorator into a method decorator
"""
# 'obj' can be a class or a function. If 'obj' is a function at the time it
# is passed to _dec, it will eventually be a method of the class it is
# defined on. If 'obj' is a class, the 'name' is required to be the name
# of the method that will be decorated.
def _dec(obj):
if not isinstance(obj, type):
return _multi_decorate(decorator, obj)
if not (name and hasattr(obj, name)):
raise ValueError(
"The keyword argument `name` must be the name of a method "
"of the decorated class: %s. Got '%s' instead." % (obj, name)
)
method = getattr(obj, name)
if not callable(method):
raise TypeError(
"Cannot decorate '%s' as it isn't a callable attribute of "
"%s (%s)." % (name, obj, method)
)
_wrapper = _multi_decorate(decorator, method)
setattr(obj, name, _wrapper)
return obj
# Don't worry about making _dec look similar to a list/tuple as it's rather
# meaningless.
if not hasattr(decorator, "__iter__"):
update_wrapper(_dec, decorator)
# Change the name to aid debugging.
obj = decorator if hasattr(decorator, "__name__") else decorator.__class__
_dec.__name__ = "method_decorator(%s)" % obj.__name__
return _dec
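# Usage sketch (illustrative; ProfileView is a hypothetical view): decorate a
# named method of a class-based view.
#
#     from django.contrib.auth.decorators import login_required
#     from django.views.generic import TemplateView
#
#     @method_decorator(login_required, name="dispatch")
#     class ProfileView(TemplateView):
#         template_name = "profile.html"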
def decorator_from_middleware_with_args(middleware_class):
"""
Like decorator_from_middleware, but return a function
that accepts the arguments to be passed to the middleware_class.
Use like::
cache_page = decorator_from_middleware_with_args(CacheMiddleware)
# ...
@cache_page(3600)
def my_view(request):
# ...
"""
return make_middleware_decorator(middleware_class)
def decorator_from_middleware(middleware_class):
"""
Given a middleware class (not an instance), return a view decorator. This
lets you use middleware functionality on a per-view basis. The middleware
is created with no params passed.
"""
return make_middleware_decorator(middleware_class)()
def make_middleware_decorator(middleware_class):
def _make_decorator(*m_args, **m_kwargs):
def _decorator(view_func):
middleware = middleware_class(view_func, *m_args, **m_kwargs)
@wraps(view_func)
def _wrapper_view(request, *args, **kwargs):
if hasattr(middleware, "process_request"):
result = middleware.process_request(request)
if result is not None:
return result
if hasattr(middleware, "process_view"):
result = middleware.process_view(request, view_func, args, kwargs)
if result is not None:
return result
try:
response = view_func(request, *args, **kwargs)
except Exception as e:
if hasattr(middleware, "process_exception"):
result = middleware.process_exception(request, e)
if result is not None:
return result
raise
if hasattr(response, "render") and callable(response.render):
if hasattr(middleware, "process_template_response"):
response = middleware.process_template_response(
request, response
)
# Defer running of process_response until after the template
# has been rendered:
if hasattr(middleware, "process_response"):
def callback(response):
return middleware.process_response(request, response)
response.add_post_render_callback(callback)
else:
if hasattr(middleware, "process_response"):
return middleware.process_response(request, response)
return response
return _wrapper_view
return _decorator
return _make_decorator
def sync_and_async_middleware(func):
"""
Mark a middleware factory as returning a hybrid middleware supporting both
types of request.
"""
func.sync_capable = True
func.async_capable = True
return func
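# Usage sketch (illustrative; timing_middleware is a hypothetical factory):
# return a sync or async middleware depending on get_response.
#
#     from asgiref.sync import iscoroutinefunction
#
#     @sync_and_async_middleware
#     def timing_middleware(get_response):
#         if iscoroutinefunction(get_response):
#             async def middleware(request):
#                 return await get_response(request)
#         else:
#             def middleware(request):
#                 return get_response(request)
#         return middleware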
def sync_only_middleware(func):
"""
Mark a middleware factory as returning a sync middleware.
This is the default.
"""
func.sync_capable = True
func.async_capable = False
return func
def async_only_middleware(func):
"""Mark a middleware factory as returning an async middleware."""
func.sync_capable = False
func.async_capable = True
return func
|
f0ba923267f168dd0f6124d3112e944ff0137ab7c36109fb61ae0d23bd2031a5 | from decimal import Decimal
from django.conf import settings
from django.utils.safestring import mark_safe
def format(
number,
decimal_sep,
decimal_pos=None,
grouping=0,
thousand_sep="",
force_grouping=False,
use_l10n=None,
):
"""
Get a number (as a number or string), and return it as a string,
using formats defined as arguments:
* decimal_sep: Decimal separator symbol (for example ".")
* decimal_pos: Number of decimal positions
* grouping: Number of digits in every group limited by thousand separator.
For non-uniform digit grouping, it can be a sequence with the number
of digit group sizes following the format used by the Python locale
module in locale.localeconv() LC_NUMERIC grouping (e.g. (3, 2, 0)).
* thousand_sep: Thousand separator symbol (for example ",")
"""
if number is None or number == "":
return mark_safe(number)
if use_l10n is None:
use_l10n = True
use_grouping = use_l10n and settings.USE_THOUSAND_SEPARATOR
use_grouping = use_grouping or force_grouping
use_grouping = use_grouping and grouping != 0
# Make the common case fast
if isinstance(number, int) and not use_grouping and not decimal_pos:
return mark_safe(number)
# sign
sign = ""
# Treat potentially very large/small floats as Decimals.
if isinstance(number, float) and "e" in str(number).lower():
number = Decimal(str(number))
if isinstance(number, Decimal):
if decimal_pos is not None:
# If the provided number is too small to affect any of the visible
# decimal places, consider it equal to '0'.
cutoff = Decimal("0." + "1".rjust(decimal_pos, "0"))
if abs(number) < cutoff:
number = Decimal("0")
# Format values with more than 200 digits (an arbitrary cutoff) using
# scientific notation to avoid high memory usage in '{:f}'.format().
_, digits, exponent = number.as_tuple()
if abs(exponent) + len(digits) > 200:
number = "{:e}".format(number)
coefficient, exponent = number.split("e")
# Format the coefficient.
coefficient = format(
coefficient,
decimal_sep,
decimal_pos,
grouping,
thousand_sep,
force_grouping,
use_l10n,
)
return "{}e{}".format(coefficient, exponent)
else:
str_number = "{:f}".format(number)
else:
str_number = str(number)
if str_number[0] == "-":
sign = "-"
str_number = str_number[1:]
# decimal part
if "." in str_number:
int_part, dec_part = str_number.split(".")
if decimal_pos is not None:
dec_part = dec_part[:decimal_pos]
else:
int_part, dec_part = str_number, ""
if decimal_pos is not None:
dec_part += "0" * (decimal_pos - len(dec_part))
dec_part = dec_part and decimal_sep + dec_part
# grouping
if use_grouping:
try:
# if grouping is a sequence
intervals = list(grouping)
except TypeError:
# grouping is a single value
intervals = [grouping, 0]
active_interval = intervals.pop(0)
int_part_gd = ""
cnt = 0
for digit in int_part[::-1]:
if cnt and cnt == active_interval:
if intervals:
active_interval = intervals.pop(0) or active_interval
int_part_gd += thousand_sep[::-1]
cnt = 0
int_part_gd += digit
cnt += 1
int_part = int_part_gd[::-1]
return sign + int_part + dec_part
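# Usage sketch (illustrative; use_l10n=False keeps the call from touching
# settings):
#
#     >>> format(1234567.891, ".", decimal_pos=2, grouping=3,
#     ...        thousand_sep=",", force_grouping=True, use_l10n=False)
#     '1,234,567.89'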
|
5a0e1e22110702c38c10a9737d24bf6983dffa1ef40d8a8eaa79910c6d536db1 | """
Timezone-related classes and functions.
"""
import functools
import zoneinfo
from contextlib import ContextDecorator
from datetime import datetime, timedelta, timezone, tzinfo
from asgiref.local import Local
from django.conf import settings
__all__ = [
"get_fixed_timezone",
"get_default_timezone",
"get_default_timezone_name",
"get_current_timezone",
"get_current_timezone_name",
"activate",
"deactivate",
"override",
"localtime",
"localdate",
"now",
"is_aware",
"is_naive",
"make_aware",
"make_naive",
]
def get_fixed_timezone(offset):
"""Return a tzinfo instance with a fixed offset from UTC."""
if isinstance(offset, timedelta):
offset = offset.total_seconds() // 60
sign = "-" if offset < 0 else "+"
hhmm = "%02d%02d" % divmod(abs(offset), 60)
name = sign + hhmm
return timezone(timedelta(minutes=offset), name)
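# Usage sketch (illustrative):
#
#     >>> get_fixed_timezone(330)  # minutes east of UTC
#     datetime.timezone(datetime.timedelta(seconds=19800), '+0530')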
# In order to avoid accessing settings at compile time,
# wrap the logic in a function and cache the result.
@functools.lru_cache
def get_default_timezone():
"""
Return the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
"""
return zoneinfo.ZoneInfo(settings.TIME_ZONE)
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""Return the name of the default time zone."""
return _get_timezone_name(get_default_timezone())
_active = Local()
def get_current_timezone():
"""Return the currently active time zone as a tzinfo instance."""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""Return the name of the currently active time zone."""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""
Return timezone.tzname(None) (the offset for fixed-offset timezones),
falling back to str(timezone) when tzname(None) isn't set.
"""
return timezone.tzname(None) or str(timezone)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] or call time.tzset()
# because it isn't thread safe.
def activate(timezone):
"""
Set the time zone for the current thread.
The ``timezone`` argument must be an instance of a tzinfo subclass or a
time zone name.
"""
if isinstance(timezone, tzinfo):
_active.value = timezone
elif isinstance(timezone, str):
_active.value = zoneinfo.ZoneInfo(timezone)
else:
raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
"""
Unset the time zone for the current thread.
Django will then use the time zone defined by settings.TIME_ZONE.
"""
if hasattr(_active, "value"):
del _active.value
class override(ContextDecorator):
"""
Temporarily set the time zone for the current thread.
This is a context manager that uses django.utils.timezone.activate()
to set the timezone on entry and restores the previously active timezone
on exit.
The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
time zone name, or ``None``. If it is ``None``, Django enables the default
time zone.
"""
def __init__(self, timezone):
self.timezone = timezone
def __enter__(self):
self.old_timezone = getattr(_active, "value", None)
if self.timezone is None:
deactivate()
else:
activate(self.timezone)
def __exit__(self, exc_type, exc_value, traceback):
if self.old_timezone is None:
deactivate()
else:
_active.value = self.old_timezone
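# Usage sketch (illustrative; assumes configured settings with USE_TZ=True):
#
#     from django.utils import timezone
#
#     with timezone.override("America/New_York"):
#         local_now = timezone.localtime()  # aware datetime in New York time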
# Templates
def template_localtime(value, use_tz=None):
"""
Check if value is a datetime and convert it to local time if necessary.
If use_tz is provided and is not None, that will force the value to
be converted (or not), overriding the value of settings.USE_TZ.
This function is designed for use by the template engine.
"""
should_convert = (
isinstance(value, datetime)
and (settings.USE_TZ if use_tz is None else use_tz)
and not is_naive(value)
and getattr(value, "convert_to_local_time", True)
)
return localtime(value) if should_convert else value
# Utilities
def localtime(value=None, timezone=None):
"""
Convert an aware datetime.datetime to local time.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone
is specified.
"""
if value is None:
value = now()
if timezone is None:
timezone = get_current_timezone()
# Emulate the behavior of astimezone() on Python < 3.6.
if is_naive(value):
raise ValueError("localtime() cannot be applied to a naive datetime")
return value.astimezone(timezone)
def localdate(value=None, timezone=None):
"""
Convert an aware datetime to local time and return the value's date.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone is
specified.
"""
return localtime(value, timezone).date()
def now():
"""
Return an aware or naive datetime.datetime, depending on settings.USE_TZ.
"""
return datetime.now(tz=timezone.utc if settings.USE_TZ else None)
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
"""
Determine if a given datetime.datetime is aware.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is not None
def is_naive(value):
"""
Determine if a given datetime.datetime is naive.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is None
def make_aware(value, timezone=None):
"""Make a naive datetime.datetime in a given time zone aware."""
if timezone is None:
timezone = get_current_timezone()
# Check that we won't overwrite the timezone of an aware datetime.
if is_aware(value):
raise ValueError("make_aware expects a naive datetime, got %s" % value)
# This may be wrong around DST changes!
return value.replace(tzinfo=timezone)
def make_naive(value, timezone=None):
"""Make an aware datetime.datetime naive in a given time zone."""
if timezone is None:
timezone = get_current_timezone()
# Emulate the behavior of astimezone() on Python < 3.6.
if is_naive(value):
raise ValueError("make_naive() cannot be applied to a naive datetime")
return value.astimezone(timezone).replace(tzinfo=None)
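# Usage sketch (illustrative; passing an explicit timezone avoids reading
# settings):
#
#     import zoneinfo
#     from datetime import datetime
#
#     paris = zoneinfo.ZoneInfo("Europe/Paris")
#     aware = make_aware(datetime(2023, 7, 1, 12, 0), paris)   # 12:00+02:00
#     naive = make_naive(aware, zoneinfo.ZoneInfo("UTC"))      # 10:00, no tzinfo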
def _datetime_ambiguous_or_imaginary(dt, tz):
return tz.utcoffset(dt.replace(fold=not dt.fold)) != tz.utcoffset(dt)
|
f1d95c2fd5c23ea580557d7eed3796f12a099844c09a68a48d38ae87ca25fcc6 | import inspect
import warnings
from asgiref.sync import iscoroutinefunction, markcoroutinefunction, sync_to_async
class RemovedInDjango51Warning(DeprecationWarning):
pass
class RemovedInDjango60Warning(PendingDeprecationWarning):
pass
RemovedInNextVersionWarning = RemovedInDjango51Warning
RemovedAfterNextVersionWarning = RemovedInDjango60Warning
class warn_about_renamed_method:
def __init__(
self, class_name, old_method_name, new_method_name, deprecation_warning
):
self.class_name = class_name
self.old_method_name = old_method_name
self.new_method_name = new_method_name
self.deprecation_warning = deprecation_warning
def __call__(self, f):
def wrapper(*args, **kwargs):
warnings.warn(
"`%s.%s` is deprecated, use `%s` instead."
% (self.class_name, self.old_method_name, self.new_method_name),
self.deprecation_warning,
2,
)
return f(*args, **kwargs)
return wrapper
class RenameMethodsBase(type):
"""
Handles the deprecation paths when renaming a method.
It does the following:
1) Define the new method if missing and complain about it.
2) Define the old method if missing.
3) Complain whenever an old method is called.
See #15363 for more details.
"""
renamed_methods = ()
def __new__(cls, name, bases, attrs):
new_class = super().__new__(cls, name, bases, attrs)
for base in inspect.getmro(new_class):
class_name = base.__name__
for renamed_method in cls.renamed_methods:
old_method_name = renamed_method[0]
old_method = base.__dict__.get(old_method_name)
new_method_name = renamed_method[1]
new_method = base.__dict__.get(new_method_name)
deprecation_warning = renamed_method[2]
wrapper = warn_about_renamed_method(class_name, *renamed_method)
# Define the new method if missing and complain about it
if not new_method and old_method:
warnings.warn(
"`%s.%s` method should be renamed `%s`."
% (class_name, old_method_name, new_method_name),
deprecation_warning,
2,
)
setattr(base, new_method_name, old_method)
setattr(base, old_method_name, wrapper(old_method))
# Define the old method as a wrapped call to the new method.
if not old_method and new_method:
setattr(base, old_method_name, wrapper(new_method))
return new_class
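# Usage sketch (illustrative; Registry and its methods are hypothetical):
# subclass RenameMethodsBase to declare the renames, then use the subclass as
# a metaclass.
#
#     class RenameRegistryMethods(RenameMethodsBase):
#         renamed_methods = (
#             ("get_thing", "fetch_thing", RemovedInDjango60Warning),
#         )
#
#     class Registry(metaclass=RenameRegistryMethods):
#         def fetch_thing(self):
#             ...
#
#     # Registry().get_thing() still works but emits RemovedInDjango60Warning.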
class DeprecationInstanceCheck(type):
def __instancecheck__(self, instance):
warnings.warn(
"`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
self.deprecation_warning,
2,
)
return super().__instancecheck__(instance)
class MiddlewareMixin:
sync_capable = True
async_capable = True
def __init__(self, get_response):
if get_response is None:
raise ValueError("get_response must be provided.")
self.get_response = get_response
self._async_check()
super().__init__()
def __repr__(self):
return "<%s get_response=%s>" % (
self.__class__.__qualname__,
getattr(
self.get_response,
"__qualname__",
self.get_response.__class__.__name__,
),
)
def _async_check(self):
"""
If get_response is a coroutine function, switch to async mode so a
thread is not consumed for the duration of each request.
"""
if iscoroutinefunction(self.get_response):
# Mark the class as async-capable, but do the actual switch
# inside __call__ to avoid swapping out dunder methods
markcoroutinefunction(self)
def __call__(self, request):
# Exit out to async mode, if needed
if iscoroutinefunction(self):
return self.__acall__(request)
response = None
if hasattr(self, "process_request"):
response = self.process_request(request)
response = response or self.get_response(request)
if hasattr(self, "process_response"):
response = self.process_response(request, response)
return response
async def __acall__(self, request):
"""
Async version of __call__ that is swapped in when an async request
is running.
"""
response = None
if hasattr(self, "process_request"):
response = await sync_to_async(
self.process_request,
thread_sensitive=True,
)(request)
response = response or await self.get_response(request)
if hasattr(self, "process_response"):
response = await sync_to_async(
self.process_response,
thread_sensitive=True,
)(request, response)
return response
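# Illustrative sketch (not part of Django's source): a minimal middleware
# built on MiddlewareMixin. The process_request/process_response hook names
# are the real extension points; the timing header is a hypothetical example.
import time
class TimingMiddleware(MiddlewareMixin):
    def process_request(self, request):
        request._start_time = time.monotonic()  # stash the start time
    def process_response(self, request, response):
        if hasattr(request, "_start_time"):
            elapsed = time.monotonic() - request._start_time
            response["X-Elapsed-Seconds"] = "%.4f" % elapsed
        return response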
|
2b242d13d906380cc3a07b5ae079905e835105e7945357749f9ade539865bd19 | import codecs
import datetime
import locale
from decimal import Decimal
from types import NoneType
from urllib.parse import quote
from django.utils.functional import Promise
class DjangoUnicodeDecodeError(UnicodeDecodeError):
def __init__(self, obj, *args):
self.obj = obj
super().__init__(*args)
def __str__(self):
return "%s. You passed in %r (%s)" % (
super().__str__(),
self.obj,
type(self.obj),
)
def smart_str(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Return a string representing 's'. Treat bytestrings using the 'encoding'
codec.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_str(s, encoding, strings_only, errors)
_PROTECTED_TYPES = (
NoneType,
int,
float,
Decimal,
datetime.datetime,
datetime.date,
datetime.time,
)
def is_protected_type(obj):
"""Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_str(strings_only=True).
"""
return isinstance(obj, _PROTECTED_TYPES)
def force_str(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Similar to smart_str(), except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if issubclass(type(s), str):
return s
if strings_only and is_protected_type(s):
return s
try:
if isinstance(s, bytes):
s = str(s, encoding, errors)
else:
s = str(s)
except UnicodeDecodeError as e:
raise DjangoUnicodeDecodeError(s, *e.args)
return s
def smart_bytes(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Return a bytestring version of 's', encoded as specified in 'encoding'.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_bytes(s, encoding, strings_only, errors)
def force_bytes(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Similar to smart_bytes, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if isinstance(s, bytes):
if encoding == "utf-8":
return s
else:
return s.decode("utf-8", errors).encode(encoding, errors)
if strings_only and is_protected_type(s):
return s
if isinstance(s, memoryview):
return bytes(s)
return str(s).encode(encoding, errors)
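# Usage sketch (illustrative, not part of this module): the smart_* variants
# leave lazy translation objects untouched, while force_* resolves them, and
# protected types pass through unchanged when strings_only=True.
def _encoding_examples():
    assert force_str(b"caf\xc3\xa9") == "café"
    assert force_bytes("café") == b"caf\xc3\xa9"
    assert force_str(42, strings_only=True) == 42  # int is a protected type
    assert force_bytes(42, strings_only=True) == 42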
def iri_to_uri(iri):
"""
Convert an Internationalized Resource Identifier (IRI) portion to a URI
portion that is suitable for inclusion in a URL.
This is the algorithm from RFC 3987 Section 3.1, slightly simplified since
the input is assumed to be a string rather than an arbitrary byte stream.
Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or
b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded
result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/').
"""
# The list of safe characters here is constructed from the "reserved" and
# "unreserved" characters specified in RFC 3986 Sections 2.2 and 2.3:
# reserved = gen-delims / sub-delims
# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
# sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
# / "*" / "+" / "," / ";" / "="
# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
# Of the unreserved characters, urllib.parse.quote() already considers all
# but the ~ safe.
# The % character is also added to the list of safe characters here, as the
# end of RFC 3987 Section 3.1 specifically mentions that % must not be
# converted.
if iri is None:
return iri
elif isinstance(iri, Promise):
iri = str(iri)
return quote(iri, safe="/#%[]=:;$&()+,!?*@'~")
# List of byte values that uri_to_iri() decodes from percent encoding.
# First, the unreserved characters from RFC 3986:
_ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)]
_hextobyte = {
(fmt % char).encode(): bytes((char,))
for ascii_range in _ascii_ranges
for char in ascii_range
for fmt in ["%02x", "%02X"]
}
# And then everything above 128, because bytes ≥ 128 are part of multibyte
# Unicode characters.
_hexdig = "0123456789ABCDEFabcdef"
_hextobyte.update(
{(a + b).encode(): bytes.fromhex(a + b) for a in _hexdig[8:] for b in _hexdig}
)
def uri_to_iri(uri):
"""
Convert a Uniform Resource Identifier(URI) into an Internationalized
Resource Identifier(IRI).
This is the algorithm from RFC 3987 Section 3.2, excluding step 4.
Take an URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return
a string containing the encoded result (e.g. '/I%20♥%20Django/').
"""
if uri is None:
return uri
uri = force_bytes(uri)
# Fast selective unquote: First, split on '%' and then starting with the
# second block, decode the first 2 bytes if they represent a hex code to
# decode. The rest of the block is the part after '%AB', not containing
# any '%'. Add that to the output without further processing.
bits = uri.split(b"%")
if len(bits) == 1:
iri = uri
else:
parts = [bits[0]]
append = parts.append
hextobyte = _hextobyte
for item in bits[1:]:
            hex = item[:2]
            if hex in hextobyte:
                append(hextobyte[hex])
append(item[2:])
else:
append(b"%")
append(item)
iri = b"".join(parts)
return repercent_broken_unicode(iri).decode()
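# Round-trip sketch (illustrative): iri_to_uri() percent-encodes non-ASCII
# characters, while uri_to_iri() decodes only unreserved/multibyte octets and
# leaves reserved ones such as %20 encoded.
def _iri_examples():
    assert iri_to_uri("/I ♥ Django/") == "/I%20%E2%99%A5%20Django/"
    assert uri_to_iri("/I%20%E2%99%A5%20Django/") == "/I%20♥%20Django/"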
def escape_uri_path(path):
"""
Escape the unsafe characters from the path portion of a Uniform Resource
Identifier (URI).
"""
# These are the "reserved" and "unreserved" characters specified in RFC
# 3986 Sections 2.2 and 2.3:
# reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
# unreserved = alphanum | mark
# mark = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
# The list of safe characters here is constructed subtracting ";", "=",
# and "?" according to RFC 3986 Section 3.3.
# The reason for not subtracting and escaping "/" is that we are escaping
# the entire path, not a path segment.
return quote(path, safe="/:@&+$,-_.!~*'()")
def punycode(domain):
"""Return the Punycode of the given domain if it's non-ASCII."""
return domain.encode("idna").decode("ascii")
def repercent_broken_unicode(path):
"""
As per RFC 3987 Section 3.2, step three of converting a URI into an IRI,
repercent-encode any octet produced that is not part of a strictly legal
UTF-8 octet sequence.
"""
while True:
try:
path.decode()
except UnicodeDecodeError as e:
# CVE-2019-14235: A recursion shouldn't be used since the exception
# handling uses massive amounts of memory
repercent = quote(path[e.start : e.end], safe=b"/#%[]=:;$&()+,!?*@'~")
path = path[: e.start] + repercent.encode() + path[e.end :]
else:
return path
def filepath_to_uri(path):
"""Convert a file system path to a URI portion that is suitable for
inclusion in a URL.
Encode certain chars that would normally be recognized as special chars
for URIs. Do not encode the ' character, as it is a valid character
within URIs. See the encodeURIComponent() JavaScript function for details.
"""
if path is None:
return path
# I know about `os.sep` and `os.altsep` but I want to leave
# some flexibility for hardcoding separators.
return quote(str(path).replace("\\", "/"), safe="/~!*()'")
def get_system_encoding():
"""
The encoding for the character type functions. Fallback to 'ascii' if the
    encoding is unsupported by Python or could not be determined. See tickets
#10335 and #5846.
"""
try:
encoding = locale.getlocale()[1] or "ascii"
codecs.lookup(encoding)
except Exception:
encoding = "ascii"
return encoding
DEFAULT_LOCALE_ENCODING = get_system_encoding()
|
dbd7ef65c58c971ed6c26200c237f4341d4b68ea546d043507337ef3a0041ef9 | """
Django's standard crypto functions and utilities.
"""
import hashlib
import hmac
import secrets
from django.conf import settings
from django.utils.encoding import force_bytes
class InvalidAlgorithm(ValueError):
"""Algorithm is not supported by hashlib."""
pass
def salted_hmac(key_salt, value, secret=None, *, algorithm="sha1"):
"""
Return the HMAC of 'value', using a key generated from key_salt and a
secret (which defaults to settings.SECRET_KEY). Default algorithm is SHA1,
but any algorithm name supported by hashlib can be passed.
A different key_salt should be passed in for every application of HMAC.
"""
if secret is None:
secret = settings.SECRET_KEY
key_salt = force_bytes(key_salt)
secret = force_bytes(secret)
try:
hasher = getattr(hashlib, algorithm)
except AttributeError as e:
raise InvalidAlgorithm(
"%r is not an algorithm accepted by the hashlib module." % algorithm
) from e
# We need to generate a derived key from our base key. We can do this by
# passing the key_salt and our base key through a pseudo-random function.
key = hasher(key_salt + secret).digest()
# If len(key_salt + secret) > block size of the hash algorithm, the above
# line is redundant and could be replaced by key = key_salt + secret, since
# the hmac module does the same thing for keys longer than the block size.
# However, we need to ensure that we *always* do this.
return hmac.new(key, msg=force_bytes(value), digestmod=hasher)
RANDOM_STRING_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
def get_random_string(length, allowed_chars=RANDOM_STRING_CHARS):
"""
Return a securely generated random string.
The bit length of the returned value can be calculated with the formula:
log_2(len(allowed_chars)^length)
For example, with default `allowed_chars` (26+26+10), this gives:
* length: 12, bit length =~ 71 bits
* length: 22, bit length =~ 131 bits
"""
return "".join(secrets.choice(allowed_chars) for i in range(length))
def constant_time_compare(val1, val2):
"""Return True if the two strings are equal, False otherwise."""
return secrets.compare_digest(force_bytes(val1), force_bytes(val2))
def pbkdf2(password, salt, iterations, dklen=0, digest=None):
"""Return the hash of password using pbkdf2."""
if digest is None:
digest = hashlib.sha256
dklen = dklen or None
password = force_bytes(password)
salt = force_bytes(salt)
return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dklen)
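# Usage sketch (illustrative; salted_hmac() normally reads
# settings.SECRET_KEY, so an explicit secret is passed here to stay
# self-contained).
def _crypto_examples():
    mac = salted_hmac("my-app.token", "value", secret="not-so-secret")
    token = mac.hexdigest()  # hex digest of the derived-key HMAC
    assert constant_time_compare(token, token)
    assert len(get_random_string(12)) == 12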
|
7c85f26dc0bfba8a972dab90c3adba4c6e95ca656682b7e16725f87e60e4a30c | import datetime
from django.utils.html import avoid_wrapping
from django.utils.timezone import is_aware
from django.utils.translation import gettext, ngettext_lazy
TIME_STRINGS = {
"year": ngettext_lazy("%(num)d year", "%(num)d years", "num"),
"month": ngettext_lazy("%(num)d month", "%(num)d months", "num"),
"week": ngettext_lazy("%(num)d week", "%(num)d weeks", "num"),
"day": ngettext_lazy("%(num)d day", "%(num)d days", "num"),
"hour": ngettext_lazy("%(num)d hour", "%(num)d hours", "num"),
"minute": ngettext_lazy("%(num)d minute", "%(num)d minutes", "num"),
}
TIME_STRINGS_KEYS = list(TIME_STRINGS.keys())
TIME_CHUNKS = [
60 * 60 * 24 * 7, # week
60 * 60 * 24, # day
60 * 60, # hour
60, # minute
]
MONTHS_DAYS = (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
def timesince(d, now=None, reversed=False, time_strings=None, depth=2):
"""
Take two datetime objects and return the time between d and now as a nicely
formatted string, e.g. "10 minutes". If d occurs after now, return
"0 minutes".
Units used are years, months, weeks, days, hours, and minutes.
Seconds and microseconds are ignored.
The algorithm takes into account the varying duration of years and months.
There is exactly "1 year, 1 month" between 2013/02/10 and 2014/03/10,
but also between 2007/08/10 and 2008/09/10 despite the delta being 393 days
in the former case and 397 in the latter.
Up to `depth` adjacent units will be displayed. For example,
"2 weeks, 3 days" and "1 year, 3 months" are possible outputs, but
"2 weeks, 3 hours" and "1 year, 5 days" are not.
`time_strings` is an optional dict of strings to replace the default
TIME_STRINGS dict.
`depth` is an optional integer to control the number of adjacent time
units returned.
Originally adapted from
https://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
Modified to improve results for years and months.
"""
if time_strings is None:
time_strings = TIME_STRINGS
if depth <= 0:
raise ValueError("depth must be greater than 0.")
# Convert datetime.date to datetime.datetime for comparison.
if not isinstance(d, datetime.datetime):
d = datetime.datetime(d.year, d.month, d.day)
if now and not isinstance(now, datetime.datetime):
now = datetime.datetime(now.year, now.month, now.day)
now = now or datetime.datetime.now(datetime.timezone.utc if is_aware(d) else None)
if reversed:
d, now = now, d
delta = now - d
# Ignore microseconds.
since = delta.days * 24 * 60 * 60 + delta.seconds
if since <= 0:
# d is in the future compared to now, stop processing.
return avoid_wrapping(time_strings["minute"] % {"num": 0})
# Get years and months.
total_months = (now.year - d.year) * 12 + (now.month - d.month)
if d.day > now.day or (d.day == now.day and d.time() > now.time()):
total_months -= 1
years, months = divmod(total_months, 12)
# Calculate the remaining time.
# Create a "pivot" datetime shifted from d by years and months, then use
# that to determine the other parts.
if years or months:
pivot_year = d.year + years
pivot_month = d.month + months
if pivot_month > 12:
pivot_month -= 12
pivot_year += 1
pivot = datetime.datetime(
pivot_year,
pivot_month,
min(MONTHS_DAYS[pivot_month - 1], d.day),
d.hour,
d.minute,
d.second,
tzinfo=d.tzinfo,
)
else:
pivot = d
remaining_time = (now - pivot).total_seconds()
partials = [years, months]
for chunk in TIME_CHUNKS:
count = int(remaining_time // chunk)
partials.append(count)
remaining_time -= chunk * count
# Find the first non-zero part (if any) and then build the result, until
# depth.
i = 0
for i, value in enumerate(partials):
if value != 0:
break
else:
return avoid_wrapping(time_strings["minute"] % {"num": 0})
result = []
current_depth = 0
while i < len(TIME_STRINGS_KEYS) and current_depth < depth:
value = partials[i]
if value == 0:
break
name = TIME_STRINGS_KEYS[i]
result.append(avoid_wrapping(time_strings[name] % {"num": value}))
current_depth += 1
i += 1
return gettext(", ").join(result)
def timeuntil(d, now=None, time_strings=None, depth=2):
"""
Like timesince, but return a string measuring the time until the given time.
"""
return timesince(d, now, reversed=True, time_strings=time_strings, depth=depth)
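# Usage sketch (illustrative; requires a configured Django settings module,
# since the unit names go through the translation machinery). With English
# defaults the joined units use non-breaking spaces:
def _timesince_examples():
    start = datetime.datetime(2023, 1, 1)
    end = datetime.datetime(2024, 2, 10, 12, 0)
    assert timesince(start, end) == "1\xa0year, 1\xa0month"
    assert timesince(start, end, depth=1) == "1\xa0year"
    assert timeuntil(start, end) == "0\xa0minutes"  # start is in the past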
|
984b76fa4837cce436e1f5bb96dc590c569ab596724d27893fd5801963a0e943 | import copy
from collections.abc import Mapping
class OrderedSet:
"""
A set which keeps the ordering of the inserted items.
"""
def __init__(self, iterable=None):
self.dict = dict.fromkeys(iterable or ())
def add(self, item):
self.dict[item] = None
def remove(self, item):
del self.dict[item]
def discard(self, item):
try:
self.remove(item)
except KeyError:
pass
def __iter__(self):
return iter(self.dict)
def __reversed__(self):
return reversed(self.dict)
def __contains__(self, item):
return item in self.dict
def __bool__(self):
return bool(self.dict)
def __len__(self):
return len(self.dict)
def __repr__(self):
data = repr(list(self.dict)) if self.dict else ""
return f"{self.__class__.__qualname__}({data})"
class MultiValueDictKeyError(KeyError):
pass
class MultiValueDict(dict):
"""
A subclass of dictionary customized to handle multiple values for the
same key.
>>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
>>> d['name']
'Simon'
>>> d.getlist('name')
['Adrian', 'Simon']
>>> d.getlist('doesnotexist')
[]
>>> d.getlist('doesnotexist', ['Adrian', 'Simon'])
['Adrian', 'Simon']
>>> d.get('lastname', 'nonexistent')
'nonexistent'
>>> d.setlist('lastname', ['Holovaty', 'Willison'])
This class exists to solve the irritating problem raised by cgi.parse_qs,
which returns a list for every key, even though most web forms submit
single name-value pairs.
"""
def __init__(self, key_to_list_mapping=()):
super().__init__(key_to_list_mapping)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, super().__repr__())
def __getitem__(self, key):
"""
Return the last data value for this key, or [] if it's an empty list;
raise KeyError if not found.
"""
try:
list_ = super().__getitem__(key)
except KeyError:
raise MultiValueDictKeyError(key)
try:
return list_[-1]
except IndexError:
return []
def __setitem__(self, key, value):
super().__setitem__(key, [value])
def __copy__(self):
return self.__class__([(k, v[:]) for k, v in self.lists()])
def __deepcopy__(self, memo):
result = self.__class__()
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(
result, copy.deepcopy(key, memo), copy.deepcopy(value, memo)
)
return result
def __getstate__(self):
return {**self.__dict__, "_data": {k: self._getlist(k) for k in self}}
def __setstate__(self, obj_dict):
data = obj_dict.pop("_data", {})
for k, v in data.items():
self.setlist(k, v)
self.__dict__.update(obj_dict)
def get(self, key, default=None):
"""
Return the last data value for the passed key. If key doesn't exist
or value is an empty list, return `default`.
"""
try:
val = self[key]
except KeyError:
return default
if val == []:
return default
return val
def _getlist(self, key, default=None, force_list=False):
"""
Return a list of values for the key.
Used internally to manipulate values list. If force_list is True,
return a new copy of values.
"""
try:
values = super().__getitem__(key)
except KeyError:
if default is None:
return []
return default
else:
if force_list:
values = list(values) if values is not None else None
return values
def getlist(self, key, default=None):
"""
Return the list of values for the key. If key doesn't exist, return a
default value.
"""
return self._getlist(key, default, force_list=True)
def setlist(self, key, list_):
super().__setitem__(key, list_)
def setdefault(self, key, default=None):
if key not in self:
self[key] = default
# Do not return default here because __setitem__() may store
# another value -- QueryDict.__setitem__() does. Look it up.
return self[key]
def setlistdefault(self, key, default_list=None):
if key not in self:
if default_list is None:
default_list = []
self.setlist(key, default_list)
# Do not return default_list here because setlist() may store
# another value -- QueryDict.setlist() does. Look it up.
return self._getlist(key)
def appendlist(self, key, value):
"""Append an item to the internal list associated with key."""
self.setlistdefault(key).append(value)
def items(self):
"""
Yield (key, value) pairs, where value is the last item in the list
associated with the key.
"""
for key in self:
yield key, self[key]
def lists(self):
"""Yield (key, list) pairs."""
return iter(super().items())
def values(self):
"""Yield the last value on every key list."""
for key in self:
yield self[key]
def copy(self):
"""Return a shallow copy of this object."""
return copy.copy(self)
def update(self, *args, **kwargs):
"""Extend rather than replace existing key lists."""
if len(args) > 1:
raise TypeError("update expected at most 1 argument, got %d" % len(args))
if args:
arg = args[0]
if isinstance(arg, MultiValueDict):
for key, value_list in arg.lists():
self.setlistdefault(key).extend(value_list)
else:
if isinstance(arg, Mapping):
arg = arg.items()
for key, value in arg:
self.setlistdefault(key).append(value)
for key, value in kwargs.items():
self.setlistdefault(key).append(value)
def dict(self):
"""Return current object as a dict with singular values."""
return {key: self[key] for key in self}
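# Behaviour sketch (illustrative): update() extends key lists instead of
# replacing them, unlike dict.update().
def _multivaluedict_example():
    d = MultiValueDict({"a": ["1"]})
    d.update({"a": "2"})
    assert d.getlist("a") == ["1", "2"]
    assert d["a"] == "2"  # __getitem__ returns the last value
    assert d.dict() == {"a": "2"}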
class ImmutableList(tuple):
"""
A tuple-like object that raises useful errors when it is asked to mutate.
Example::
>>> a = ImmutableList(range(5), warning="You cannot mutate this.")
>>> a[3] = '4'
Traceback (most recent call last):
...
AttributeError: You cannot mutate this.
"""
def __new__(cls, *args, warning="ImmutableList object is immutable.", **kwargs):
self = tuple.__new__(cls, *args, **kwargs)
self.warning = warning
return self
def complain(self, *args, **kwargs):
raise AttributeError(self.warning)
# All list mutation functions complain.
__delitem__ = complain
__delslice__ = complain
__iadd__ = complain
__imul__ = complain
__setitem__ = complain
__setslice__ = complain
append = complain
extend = complain
insert = complain
pop = complain
remove = complain
sort = complain
reverse = complain
class DictWrapper(dict):
"""
Wrap accesses to a dictionary so that certain values (those starting with
the specified prefix) are passed through a function before being returned.
The prefix is removed before looking up the real value.
Used by the SQL construction code to ensure that values are correctly
quoted before being used.
"""
def __init__(self, data, func, prefix):
super().__init__(data)
self.func = func
self.prefix = prefix
def __getitem__(self, key):
"""
Retrieve the real value after stripping the prefix string (if
present). If the prefix is present, pass the value through self.func
before returning, otherwise return the raw value.
"""
use_func = key.startswith(self.prefix)
key = key.removeprefix(self.prefix)
value = super().__getitem__(key)
if use_func:
return self.func(value)
return value
class CaseInsensitiveMapping(Mapping):
"""
Mapping allowing case-insensitive key lookups. Original case of keys is
preserved for iteration and string representation.
Example::
>>> ci_map = CaseInsensitiveMapping({'name': 'Jane'})
>>> ci_map['Name']
Jane
>>> ci_map['NAME']
Jane
>>> ci_map['name']
Jane
>>> ci_map # original case preserved
{'name': 'Jane'}
"""
def __init__(self, data):
self._store = {k.lower(): (k, v) for k, v in self._unpack_items(data)}
def __getitem__(self, key):
return self._store[key.lower()][1]
def __len__(self):
return len(self._store)
def __eq__(self, other):
return isinstance(other, Mapping) and {
k.lower(): v for k, v in self.items()
} == {k.lower(): v for k, v in other.items()}
def __iter__(self):
return (original_key for original_key, value in self._store.values())
def __repr__(self):
return repr({key: value for key, value in self._store.values()})
def copy(self):
return self
@staticmethod
def _unpack_items(data):
# Explicitly test for dict first as the common case for performance,
# avoiding abc's __instancecheck__ and _abc_instancecheck for the
# general Mapping case.
if isinstance(data, (dict, Mapping)):
yield from data.items()
return
for i, elem in enumerate(data):
if len(elem) != 2:
raise ValueError(
"dictionary update sequence element #{} has length {}; "
"2 is required.".format(i, len(elem))
)
if not isinstance(elem[0], str):
raise ValueError(
"Element key %r invalid, only strings are allowed" % elem[0]
)
yield elem
|
d42d5ac6a7c80f83e39121abf502e6b5e8b85e41157c87ac29d21250d5aa521c | """
PHP date() style date formatting
See https://www.php.net/date for format strings
Usage:
>>> from datetime import datetime
>>> d = datetime.now()
>>> df = DateFormat(d)
>>> print(df.format('jS F Y H:i'))
7th October 2003 11:39
>>>
"""
import calendar
from datetime import date, datetime, time
from email.utils import format_datetime as format_datetime_rfc5322
from django.utils.dates import (
MONTHS,
MONTHS_3,
MONTHS_ALT,
MONTHS_AP,
WEEKDAYS,
WEEKDAYS_ABBR,
)
from django.utils.regex_helper import _lazy_re_compile
from django.utils.timezone import (
_datetime_ambiguous_or_imaginary,
get_default_timezone,
is_naive,
make_aware,
)
from django.utils.translation import gettext as _
re_formatchars = _lazy_re_compile(r"(?<!\\)([aAbcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])")
re_escaped = _lazy_re_compile(r"\\(.)")
class Formatter:
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(str(formatstr))):
if i % 2:
if type(self.data) is date and hasattr(TimeFormat, piece):
raise TypeError(
"The format for date objects may not contain "
"time-related format specifiers (found '%s')." % piece
)
pieces.append(str(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r"\1", piece))
return "".join(pieces)
class TimeFormat(Formatter):
def __init__(self, obj):
self.data = obj
self.timezone = None
if isinstance(obj, datetime):
# Timezone is only supported when formatting datetime objects, not
# date objects (timezone information not appropriate), or time
# objects (against established django policy).
if is_naive(obj):
timezone = get_default_timezone()
else:
timezone = obj.tzinfo
if not _datetime_ambiguous_or_imaginary(obj, timezone):
self.timezone = timezone
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _("p.m.")
return _("a.m.")
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _("PM")
return _("AM")
def e(self):
"""
Timezone name.
If timezone information is not available, return an empty string.
"""
if not self.timezone:
return ""
try:
if getattr(self.data, "tzinfo", None):
return self.data.tzname() or ""
except NotImplementedError:
pass
return ""
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
hour = self.data.hour % 12 or 12
minute = self.data.minute
return "%d:%02d" % (hour, minute) if minute else hour
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
return self.data.hour % 12 or 12
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return "%02d" % (self.data.hour % 12 or 12)
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return "%02d" % self.data.hour
def i(self):
"Minutes; i.e. '00' to '59'"
return "%02d" % self.data.minute
def O(self): # NOQA: E743, E741
"""
Difference to Greenwich time in hours; e.g. '+0200', '-0430'.
If timezone information is not available, return an empty string.
"""
if self.timezone is None:
return ""
offset = self.timezone.utcoffset(self.data)
seconds = offset.days * 86400 + offset.seconds
sign = "-" if seconds < 0 else "+"
seconds = abs(seconds)
return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60)
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _("midnight")
if self.data.minute == 0 and self.data.hour == 12:
return _("noon")
return "%s %s" % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return "%02d" % self.data.second
def T(self):
"""
Time zone of this machine; e.g. 'EST' or 'MDT'.
If timezone information is not available, return an empty string.
"""
if self.timezone is None:
return ""
return str(self.timezone.tzname(self.data))
def u(self):
"Microseconds; i.e. '000000' to '999999'"
return "%06d" % self.data.microsecond
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
If timezone information is not available, return an empty string.
"""
if self.timezone is None:
return ""
offset = self.timezone.utcoffset(self.data)
# `offset` is a datetime.timedelta. For negative values (to the west of
# UTC) only days can be negative (days=-1) and seconds are always
# positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0)
# Positive offsets have days=0
return offset.days * 86400 + offset.seconds
class DateFormat(TimeFormat):
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return "%02d" % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self): # NOQA: E743, E741
"'1' if daylight saving time, '0' otherwise."
if self.timezone is None:
return ""
return "1" if self.timezone.dst(self.data) else "0"
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self): # NOQA: E743, E741
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return "%02d" % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def o(self):
"ISO 8601 year number matching the ISO week number (W)"
return self.data.isocalendar().year
def r(self):
"RFC 5322 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
value = self.data
if not isinstance(value, datetime):
# Assume midnight in default timezone if datetime.date provided.
default_timezone = get_default_timezone()
value = datetime.combine(value, time.min).replace(tzinfo=default_timezone)
elif is_naive(value):
value = make_aware(value, timezone=self.timezone)
return format_datetime_rfc5322(value)
def S(self):
"""
English ordinal suffix for the day of the month, 2 characters; i.e.
'st', 'nd', 'rd' or 'th'.
"""
if self.data.day in (11, 12, 13): # Special case
return "th"
last = self.data.day % 10
if last == 1:
return "st"
if last == 2:
return "nd"
if last == 3:
return "rd"
return "th"
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return calendar.monthrange(self.data.year, self.data.month)[1]
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
value = self.data
if not isinstance(value, datetime):
value = datetime.combine(value, time.min)
return int(value.timestamp())
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
return self.data.isocalendar().week
def y(self):
"""Year, 2 digits with leading zeros; e.g. '99'."""
return "%02d" % (self.data.year % 100)
def Y(self):
"""Year, 4 digits with leading zeros; e.g. '1999'."""
return "%04d" % self.data.year
def z(self):
"""Day of the year, i.e. 1 to 366."""
return self.data.timetuple().tm_yday
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
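# Usage sketch (illustrative; formatting a naive datetime needs configured
# settings for the default time zone, and month names go through
# translation).
def _dateformat_examples():
    dt = datetime(2008, 1, 2, 10, 30)
    assert format(dt, "jS F Y") == "2nd January 2008"
    assert time_format(dt.time(), "H:i") == "10:30"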
|
731b1457436e9e44735f8de368d35e8e0bc8bb657182b5cfaefd0daa5ff5bf58 | """HTML utilities suitable for global use."""
import html
import json
import re
from html.parser import HTMLParser
from urllib.parse import parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit
from django.utils.encoding import punycode
from django.utils.functional import Promise, keep_lazy, keep_lazy_text
from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import SafeData, SafeString, mark_safe
from django.utils.text import normalize_newlines
@keep_lazy(SafeString)
def escape(text):
"""
Return the given text with ampersands, quotes and angle brackets encoded
for use in HTML.
Always escape input, even if it's already escaped and marked as such.
This may result in double-escaping. If this is a concern, use
conditional_escape() instead.
"""
return SafeString(html.escape(str(text)))
_js_escapes = {
ord("\\"): "\\u005C",
ord("'"): "\\u0027",
ord('"'): "\\u0022",
ord(">"): "\\u003E",
ord("<"): "\\u003C",
ord("&"): "\\u0026",
ord("="): "\\u003D",
ord("-"): "\\u002D",
ord(";"): "\\u003B",
ord("`"): "\\u0060",
ord("\u2028"): "\\u2028",
ord("\u2029"): "\\u2029",
}
# Escape every ASCII character with a value less than 32.
_js_escapes.update((ord("%c" % z), "\\u%04X" % z) for z in range(32))
@keep_lazy(SafeString)
def escapejs(value):
"""Hex encode characters for use in JavaScript strings."""
return mark_safe(str(value).translate(_js_escapes))
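# Output sketch (illustrative): escapejs() makes a value safe inside a quoted
# JavaScript string literal (not in other JS contexts):
#   escapejs('<script>alert("x")</script>')
#   -> '\u003Cscript\u003Ealert(\u0022x\u0022)\u003C/script\u003E'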
_json_script_escapes = {
ord(">"): "\\u003E",
ord("<"): "\\u003C",
ord("&"): "\\u0026",
}
def json_script(value, element_id=None, encoder=None):
"""
Escape all the HTML/XML special characters with their unicode escapes, so
value is safe to be output anywhere except for inside a tag attribute. Wrap
the escaped JSON in a script tag.
"""
from django.core.serializers.json import DjangoJSONEncoder
json_str = json.dumps(value, cls=encoder or DjangoJSONEncoder).translate(
_json_script_escapes
)
if element_id:
template = '<script id="{}" type="application/json">{}</script>'
args = (element_id, mark_safe(json_str))
else:
template = '<script type="application/json">{}</script>'
args = (mark_safe(json_str),)
return format_html(template, *args)
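# Output sketch (illustrative): HTML-sensitive characters in the JSON payload
# are unicode-escaped so the value cannot close the surrounding script tag:
#   json_script({"msg": "</script>"}, "data")
#   -> '<script id="data" type="application/json">'
#      '{"msg": "\u003C/script\u003E"}</script>'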
def conditional_escape(text):
"""
Similar to escape(), except that it doesn't operate on pre-escaped strings.
This function relies on the __html__ convention used both by Django's
SafeData class and by third-party libraries like markupsafe.
"""
if isinstance(text, Promise):
text = str(text)
if hasattr(text, "__html__"):
return text.__html__()
else:
return escape(text)
def format_html(format_string, *args, **kwargs):
"""
Similar to str.format, but pass all arguments through conditional_escape(),
and call mark_safe() on the result. This function should be used instead
of str.format or % interpolation to build up small HTML fragments.
"""
args_safe = map(conditional_escape, args)
kwargs_safe = {k: conditional_escape(v) for (k, v) in kwargs.items()}
return mark_safe(format_string.format(*args_safe, **kwargs_safe))
def format_html_join(sep, format_string, args_generator):
"""
A wrapper of format_html, for the common case of a group of arguments that
need to be formatted using the same format string, and then joined using
'sep'. 'sep' is also passed through conditional_escape.
'args_generator' should be an iterator that returns the sequence of 'args'
that will be passed to format_html.
Example:
format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name)
for u in users))
"""
return mark_safe(
conditional_escape(sep).join(
format_html(format_string, *args) for args in args_generator
)
)
@keep_lazy_text
def linebreaks(value, autoescape=False):
"""Convert newlines into <p> and <br>s."""
value = normalize_newlines(value)
paras = re.split("\n{2,}", str(value))
if autoescape:
paras = ["<p>%s</p>" % escape(p).replace("\n", "<br>") for p in paras]
else:
paras = ["<p>%s</p>" % p.replace("\n", "<br>") for p in paras]
return "\n\n".join(paras)
class MLStripper(HTMLParser):
def __init__(self):
super().__init__(convert_charrefs=False)
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def handle_entityref(self, name):
self.fed.append("&%s;" % name)
def handle_charref(self, name):
self.fed.append("&#%s;" % name)
def get_data(self):
return "".join(self.fed)
def _strip_once(value):
"""
Internal tag stripping utility used by strip_tags.
"""
s = MLStripper()
s.feed(value)
s.close()
return s.get_data()
@keep_lazy_text
def strip_tags(value):
"""Return the given HTML with all tags stripped."""
    # Note: in the typical case this loop executes _strip_once() just once.
    # The loop condition is redundant, but it reduces the number of calls to
    # _strip_once().
value = str(value)
while "<" in value and ">" in value:
new_value = _strip_once(value)
if value.count("<") == new_value.count("<"):
# _strip_once wasn't able to detect more tags.
break
value = new_value
return value
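# Behaviour sketch (illustrative): tags are stripped repeatedly until no new
# ones surface, which also defuses overlapping constructs:
#   strip_tags("<p>Hello <b>world</b>!</p>") -> "Hello world!"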
@keep_lazy_text
def strip_spaces_between_tags(value):
"""Return the given HTML with spaces between tags removed."""
return re.sub(r">\s+<", "><", str(value))
def smart_urlquote(url):
"""Quote a URL if it isn't already quoted."""
def unquote_quote(segment):
segment = unquote(segment)
# Tilde is part of RFC 3986 Section 2.3 Unreserved Characters,
# see also https://bugs.python.org/issue16285
return quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + "~")
# Handle IDN before quoting.
try:
scheme, netloc, path, query, fragment = urlsplit(url)
except ValueError:
# invalid IPv6 URL (normally square brackets in hostname part).
return unquote_quote(url)
try:
netloc = punycode(netloc) # IDN -> ACE
except UnicodeError: # invalid domain part
return unquote_quote(url)
if query:
# Separately unquoting key/value, so as to not mix querystring separators
# included in query values. See #22267.
query_parts = [
(unquote(q[0]), unquote(q[1]))
for q in parse_qsl(query, keep_blank_values=True)
]
# urlencode will take care of quoting
query = urlencode(query_parts)
path = unquote_quote(path)
fragment = unquote_quote(fragment)
return urlunsplit((scheme, netloc, path, query, fragment))
class Urlizer:
"""
Convert any URLs in text into clickable links.
Work on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing.
"""
trailing_punctuation_chars = ".,:;!"
wrapping_punctuation = [("(", ")"), ("[", "]")]
simple_url_re = _lazy_re_compile(r"^https?://\[?\w", re.IGNORECASE)
simple_url_2_re = _lazy_re_compile(
r"^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$", re.IGNORECASE
)
word_split_re = _lazy_re_compile(r"""([\s<>"']+)""")
mailto_template = "mailto:{local}@{domain}"
url_template = '<a href="{href}"{attrs}>{url}</a>'
def __call__(self, text, trim_url_limit=None, nofollow=False, autoescape=False):
"""
If trim_url_limit is not None, truncate the URLs in the link text
longer than this limit to trim_url_limit - 1 characters and append an
ellipsis.
If nofollow is True, give the links a rel="nofollow" attribute.
If autoescape is True, autoescape the link text and URLs.
"""
safe_input = isinstance(text, SafeData)
words = self.word_split_re.split(str(text))
return "".join(
[
self.handle_word(
word,
safe_input=safe_input,
trim_url_limit=trim_url_limit,
nofollow=nofollow,
autoescape=autoescape,
)
for word in words
]
)
def handle_word(
self,
word,
*,
safe_input,
trim_url_limit=None,
nofollow=False,
autoescape=False,
):
if "." in word or "@" in word or ":" in word:
# lead: Punctuation trimmed from the beginning of the word.
# middle: State of the word.
# trail: Punctuation trimmed from the end of the word.
lead, middle, trail = self.trim_punctuation(word)
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ""
if self.simple_url_re.match(middle):
url = smart_urlquote(html.unescape(middle))
elif self.simple_url_2_re.match(middle):
url = smart_urlquote("http://%s" % html.unescape(middle))
elif ":" not in middle and self.is_email_simple(middle):
local, domain = middle.rsplit("@", 1)
try:
domain = punycode(domain)
except UnicodeError:
return word
url = self.mailto_template.format(local=local, domain=domain)
nofollow_attr = ""
# Make link.
if url:
trimmed = self.trim_url(middle, limit=trim_url_limit)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
trimmed = escape(trimmed)
middle = self.url_template.format(
href=escape(url),
attrs=nofollow_attr,
url=trimmed,
)
return mark_safe(f"{lead}{middle}{trail}")
else:
if safe_input:
return mark_safe(word)
elif autoescape:
return escape(word)
elif safe_input:
return mark_safe(word)
elif autoescape:
return escape(word)
return word
def trim_url(self, x, *, limit):
if limit is None or len(x) <= limit:
return x
return "%s…" % x[: max(0, limit - 1)]
def trim_punctuation(self, word):
"""
Trim trailing and wrapping punctuation from `word`. Return the items of
the new state.
"""
lead, middle, trail = "", word, ""
# Continue trimming until middle remains unchanged.
trimmed_something = True
while trimmed_something:
trimmed_something = False
# Trim wrapping punctuation.
for opening, closing in self.wrapping_punctuation:
if middle.startswith(opening):
middle = middle.removeprefix(opening)
lead += opening
trimmed_something = True
# Keep parentheses at the end only if they're balanced.
if (
middle.endswith(closing)
and middle.count(closing) == middle.count(opening) + 1
):
middle = middle.removesuffix(closing)
trail = closing + trail
trimmed_something = True
# Trim trailing punctuation (after trimming wrapping punctuation,
# as encoded entities contain ';'). Unescape entities to avoid
# breaking them by removing ';'.
middle_unescaped = html.unescape(middle)
stripped = middle_unescaped.rstrip(self.trailing_punctuation_chars)
if middle_unescaped != stripped:
punctuation_count = len(middle_unescaped) - len(stripped)
trail = middle[-punctuation_count:] + trail
middle = middle[:-punctuation_count]
trimmed_something = True
return lead, middle, trail
@staticmethod
def is_email_simple(value):
"""Return True if value looks like an email address."""
# An @ must be in the middle of the value.
if "@" not in value or value.startswith("@") or value.endswith("@"):
return False
try:
p1, p2 = value.split("@")
except ValueError:
# value contains more than one @.
return False
# Dot must be in p2 (e.g. example.com)
if "." not in p2 or p2.startswith("."):
return False
return True
urlizer = Urlizer()
@keep_lazy_text
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
return urlizer(
text, trim_url_limit=trim_url_limit, nofollow=nofollow, autoescape=autoescape
)
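# Usage sketch (illustrative): bare domains gain an http:// scheme, and
# trailing punctuation stays outside the generated link:
#   urlize("See www.djangoproject.com.", nofollow=True)
#   -> 'See <a href="http://www.djangoproject.com" rel="nofollow">'
#      'www.djangoproject.com</a>.'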
def avoid_wrapping(value):
"""
Avoid text wrapping in the middle of a phrase by adding non-breaking
spaces where there previously were normal spaces.
"""
return value.replace(" ", "\xa0")
def html_safe(klass):
"""
A decorator that defines the __html__ method. This helps non-Django
templates to detect classes whose __str__ methods return SafeString.
"""
if "__html__" in klass.__dict__:
raise ValueError(
"can't apply @html_safe to %s because it defines "
"__html__()." % klass.__name__
)
if "__str__" not in klass.__dict__:
raise ValueError(
"can't apply @html_safe to %s because it doesn't "
"define __str__()." % klass.__name__
)
klass_str = klass.__str__
klass.__str__ = lambda self: mark_safe(klass_str(self))
klass.__html__ = lambda self: str(self)
return klass
|
1ac4c77c1070822bb0c77391f24c624d3adfea4de744979f05284599d0112ca0 | import copy
import itertools
import operator
from functools import total_ordering, wraps
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
A cached property can be made out of an existing method:
(e.g. ``url = cached_property(get_absolute_url)``).
"""
name = None
@staticmethod
def func(instance):
raise TypeError(
"Cannot use cached_property instance without calling "
"__set_name__() on it."
)
def __init__(self, func):
self.real_func = func
self.__doc__ = getattr(func, "__doc__")
def __set_name__(self, owner, name):
if self.name is None:
self.name = name
self.func = self.real_func
elif name != self.name:
raise TypeError(
"Cannot assign the same cached_property to two different names "
"(%r and %r)." % (self.name, name)
)
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
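# Usage sketch (illustrative): the first access stores the computed value in
# the instance __dict__, so later lookups never reach the descriptor again.
class _Circle:
    def __init__(self, radius):
        self.radius = radius
    @cached_property
    def area(self):
        return 3.14159 * self.radius**2  # computed at most once per instance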
class classproperty:
"""
Decorator that converts a method with a single cls argument into a property
that can be accessed directly from the class.
"""
def __init__(self, method=None):
self.fget = method
def __get__(self, instance, cls=None):
return self.fget(cls)
def getter(self, method):
self.fget = method
return self
class Promise:
"""
Base class for the proxy class created in the closure of the lazy function.
It's used to recognize promises in code.
"""
pass
def lazy(func, *resultclasses):
"""
    Turn any callable into a lazily evaluated callable. At least one result
    class or type is required so that the automatic forcing of the lazy
    evaluation code is triggered. Results are not memoized; the function is
    evaluated on every access.
"""
@total_ordering
class __proxy__(Promise):
"""
Encapsulate a function call and act as a proxy for methods that are
called on the result of that function. The function is not evaluated
until one of the methods on the result is called.
"""
__prepared = False
def __init__(self, args, kw):
self.__args = args
self.__kw = kw
if not self.__prepared:
self.__prepare_class__()
self.__class__.__prepared = True
def __reduce__(self):
return (
_lazy_proxy_unpickle,
(func, self.__args, self.__kw) + resultclasses,
)
def __repr__(self):
return repr(self.__cast())
@classmethod
def __prepare_class__(cls):
for resultclass in resultclasses:
for type_ in resultclass.mro():
for method_name in type_.__dict__:
# All __promise__ return the same wrapper method, they
# look up the correct implementation when called.
if hasattr(cls, method_name):
continue
meth = cls.__promise__(method_name)
setattr(cls, method_name, meth)
cls._delegate_bytes = bytes in resultclasses
cls._delegate_text = str in resultclasses
if cls._delegate_bytes and cls._delegate_text:
raise ValueError(
"Cannot call lazy() with both bytes and text return types."
)
if cls._delegate_text:
cls.__str__ = cls.__text_cast
elif cls._delegate_bytes:
cls.__bytes__ = cls.__bytes_cast
@classmethod
def __promise__(cls, method_name):
# Builds a wrapper around some magic method
def __wrapper__(self, *args, **kw):
# Automatically triggers the evaluation of a lazy value and
# applies the given magic method of the result type.
res = func(*self.__args, **self.__kw)
return getattr(res, method_name)(*args, **kw)
return __wrapper__
def __text_cast(self):
return func(*self.__args, **self.__kw)
def __bytes_cast(self):
return bytes(func(*self.__args, **self.__kw))
def __bytes_cast_encoded(self):
return func(*self.__args, **self.__kw).encode()
def __cast(self):
if self._delegate_bytes:
return self.__bytes_cast()
elif self._delegate_text:
return self.__text_cast()
else:
return func(*self.__args, **self.__kw)
def __str__(self):
# object defines __str__(), so __prepare_class__() won't overload
# a __str__() method from the proxied class.
return str(self.__cast())
def __eq__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() == other
def __lt__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() < other
def __hash__(self):
return hash(self.__cast())
def __mod__(self, rhs):
if self._delegate_text:
return str(self) % rhs
return self.__cast() % rhs
def __add__(self, other):
return self.__cast() + other
def __radd__(self, other):
return other + self.__cast()
def __deepcopy__(self, memo):
# Instances of this class are effectively immutable. It's just a
# collection of functions. So we don't need to do anything
# complicated for copying.
memo[id(self)] = self
return self
@wraps(func)
def __wrapper__(*args, **kw):
# Creates the proxy object, instead of the actual value.
return __proxy__(args, kw)
return __wrapper__
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
return lazy(func, *resultclasses)(*args, **kwargs)
def lazystr(text):
"""
Shortcut for the common case of a lazy callable that returns str.
"""
return lazy(str, str)(text)
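# Behaviour sketch (illustrative): the wrapped callable runs only when the
# proxy is forced, and runs again on every access (no memoization).
def _lazy_example():
    calls = []
    def expensive():
        calls.append(1)
        return "value"
    proxy = lazy(expensive, str)()
    assert calls == []  # creating the proxy evaluates nothing
    assert str(proxy) == "value"
    assert proxy.upper() == "VALUE"  # forces a second evaluation
    assert len(calls) == 2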
def keep_lazy(*resultclasses):
"""
A decorator that allows a function to be called with one or more lazy
arguments. If none of the args are lazy, the function is evaluated
immediately, otherwise a __proxy__ is returned that will evaluate the
function when needed.
"""
if not resultclasses:
raise TypeError("You must pass at least one argument to keep_lazy().")
def decorator(func):
lazy_func = lazy(func, *resultclasses)
@wraps(func)
def wrapper(*args, **kwargs):
if any(
isinstance(arg, Promise)
for arg in itertools.chain(args, kwargs.values())
):
return lazy_func(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
return decorator
def keep_lazy_text(func):
"""
A decorator for functions that accept lazy arguments and return text.
"""
return keep_lazy(str)(func)
empty = object()
def new_method_proxy(func):
def inner(self, *args):
if (_wrapped := self._wrapped) is empty:
self._setup()
_wrapped = self._wrapped
return func(_wrapped, *args)
inner._mask_wrapped = False
return inner
class LazyObject:
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
# Avoid infinite recursion when tracing __init__ (#19456).
_wrapped = None
def __init__(self):
# Note: if a subclass overrides __init__(), it will likely need to
# override __copy__() and __deepcopy__() as well.
self._wrapped = empty
def __getattribute__(self, name):
if name == "_wrapped":
# Avoid recursion when getting wrapped object.
return super().__getattribute__(name)
value = super().__getattribute__(name)
# If attribute is a proxy method, raise an AttributeError to call
# __getattr__() and use the wrapped object method.
if not getattr(value, "_mask_wrapped", True):
raise AttributeError
return value
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialize the wrapped object.
"""
raise NotImplementedError(
"subclasses of LazyObject must provide a _setup() method"
)
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. We're going to have to initialize the wrapped
# object to successfully pickle it, so we might as well just pickle the
# wrapped object since they're supposed to act the same way.
#
# Unfortunately, if we try to simply act like the wrapped object, the ruse
# will break down when pickle gets our id(). Thus we end up with pickle
# thinking, in effect, that we are a distinct object from the wrapped
# object, but with the same __dict__. This can cause problems (see #25389).
#
# So instead, we define our own __reduce__ method and custom unpickler. We
# pickle the wrapped object as the unpickler's argument, so that pickle
# will pickle it normally, and then the unpickler simply returns its
# argument.
def __reduce__(self):
if self._wrapped is empty:
self._setup()
return (unpickle_lazyobject, (self._wrapped,))
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use type(self), not
# self.__class__, because the latter is proxied.
return type(self)()
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use type(self), not self.__class__, because the
# latter is proxied.
result = type(self)()
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__bytes__ = new_method_proxy(bytes)
__str__ = new_method_proxy(str)
__bool__ = new_method_proxy(bool)
# Introspection support
__dir__ = new_method_proxy(dir)
# Need to pretend to be the wrapped class, for the sake of objects that
# care about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__lt__ = new_method_proxy(operator.lt)
__gt__ = new_method_proxy(operator.gt)
__ne__ = new_method_proxy(operator.ne)
__hash__ = new_method_proxy(hash)
# List/Tuple/Dictionary methods support
__getitem__ = new_method_proxy(operator.getitem)
__setitem__ = new_method_proxy(operator.setitem)
__delitem__ = new_method_proxy(operator.delitem)
__iter__ = new_method_proxy(iter)
__len__ = new_method_proxy(len)
__contains__ = new_method_proxy(operator.contains)
def unpickle_lazyobject(wrapped):
"""
Used to unpickle lazy objects. Just return its argument, which will be the
wrapped object.
"""
return wrapped
class SimpleLazyObject(LazyObject):
"""
A lazy object initialized from any function.
Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy.
"""
def __init__(self, func):
"""
Pass in a callable that returns the object to be wrapped.
If copies are made of the resulting SimpleLazyObject, which can happen
in various circumstances within Django, then you must ensure that the
callable can be safely run more than once and will return the same
value.
"""
self.__dict__["_setupfunc"] = func
super().__init__()
def _setup(self):
self._wrapped = self._setupfunc()
# Return a meaningful representation of the lazy object for debugging
# without evaluating the wrapped object.
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._setupfunc
else:
repr_attr = self._wrapped
return "<%s: %r>" % (type(self).__name__, repr_attr)
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use SimpleLazyObject, not
# self.__class__, because the latter is proxied.
return SimpleLazyObject(self._setupfunc)
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the
# latter is proxied.
result = SimpleLazyObject(self._setupfunc)
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__add__ = new_method_proxy(operator.add)
@new_method_proxy
def __radd__(self, other):
return other + self
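# Usage sketch (illustrative): the factory runs on first use, not at
# construction time.
def _simple_lazy_example():
    obj = SimpleLazyObject(lambda: {"status": "ready"})
    assert obj._wrapped is empty  # not built yet
    assert obj["status"] == "ready"  # first access triggers _setup()
    assert isinstance(obj, dict)  # __class__ is proxied to the wrapped type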
def partition(predicate, values):
"""
Split the values into two sets, based on the return value of the function
(True/False). e.g.:
>>> partition(lambda x: x > 3, range(5))
[0, 1, 2, 3], [4]
"""
results = ([], [])
for item in values:
results[predicate(item)].append(item)
return results
|
0f82e1c83e5f209c9046be7fe7efe13e68789a62d1eeb34ccffa3949422c8824 | import base64
import datetime
import re
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from urllib.parse import quote, unquote
from urllib.parse import urlencode as original_urlencode
from urllib.parse import urlparse
from django.utils.datastructures import MultiValueDict
from django.utils.regex_helper import _lazy_re_compile
# Based on RFC 9110 Appendix A.
ETAG_MATCH = _lazy_re_compile(
r"""
\A( # start of string and capture group
(?:W/)? # optional weak indicator
" # opening quote
[^"]* # any sequence of non-quote characters
" # end quote
)\Z # end of string and capture group
""",
re.X,
)
MONTHS = "jan feb mar apr may jun jul aug sep oct nov dec".split()
__D = r"(?P<day>[0-9]{2})"
__D2 = r"(?P<day>[ 0-9][0-9])"
__M = r"(?P<mon>\w{3})"
__Y = r"(?P<year>[0-9]{4})"
__Y2 = r"(?P<year>[0-9]{2})"
__T = r"(?P<hour>[0-9]{2}):(?P<min>[0-9]{2}):(?P<sec>[0-9]{2})"
RFC1123_DATE = _lazy_re_compile(r"^\w{3}, %s %s %s %s GMT$" % (__D, __M, __Y, __T))
RFC850_DATE = _lazy_re_compile(r"^\w{6,9}, %s-%s-%s %s GMT$" % (__D, __M, __Y2, __T))
ASCTIME_DATE = _lazy_re_compile(r"^\w{3} %s %s %s %s$" % (__M, __D2, __T, __Y))
RFC3986_GENDELIMS = ":/?#[]@"
RFC3986_SUBDELIMS = "!$&'()*+,;="
def urlencode(query, doseq=False):
"""
A version of Python's urllib.parse.urlencode() function that can operate on
MultiValueDict and non-string values.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, "items"):
query = query.items()
query_params = []
for key, value in query:
if value is None:
raise TypeError(
"Cannot encode None for key '%s' in a query string. Did you "
"mean to pass an empty string or omit the value?" % key
)
elif not doseq or isinstance(value, (str, bytes)):
query_val = value
else:
try:
itr = iter(value)
except TypeError:
query_val = value
else:
# Consume generators and iterators, when doseq=True, to
# work around https://bugs.python.org/issue31706.
query_val = []
for item in itr:
if item is None:
raise TypeError(
"Cannot encode None for key '%s' in a query "
"string. Did you mean to pass an empty string or "
"omit the value?" % key
)
elif not isinstance(item, bytes):
item = str(item)
query_val.append(item)
query_params.append((key, query_val))
return original_urlencode(query_params, doseq)
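# Usage sketch (illustrative values, not fixtures from Django's test suite):
#
#   urlencode({"a": [1, 2], "b": "x"}, doseq=True)  # -> 'a=1&a=2&b=x'
#   urlencode({"a": [1, 2]})                        # -> 'a=%5B1%2C+2%5D'
#   urlencode({"a": None})                          # -> TypeError (explicit, see above)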
def http_date(epoch_seconds=None):
"""
Format the time to match the RFC 5322 date format as specified by RFC 9110
Section 5.6.7.
`epoch_seconds` is a floating point number expressed in seconds since the
    epoch, in UTC, such as that output by time.time(). If set to None, it
defaults to the current time.
Output a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parse a date format as specified by HTTP RFC 9110 Section 5.6.7.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Return an integer expressed in seconds since the epoch, in UTC.
"""
# email.utils.parsedate() does the job for RFC 1123 dates; unfortunately
# RFC 9110 makes it mandatory to support RFC 850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
tz = datetime.timezone.utc
year = int(m["year"])
if year < 100:
current_year = datetime.datetime.now(tz=tz).year
current_century = current_year - (current_year % 100)
if year - (current_year % 100) > 50:
                # A year that appears to be more than 50 years in the future
                # is interpreted as representing the past.
year += current_century - 100
else:
year += current_century
month = MONTHS.index(m["mon"].lower()) + 1
day = int(m["day"])
hour = int(m["hour"])
min = int(m["min"])
sec = int(m["sec"])
result = datetime.datetime(year, month, day, hour, min, sec, tzinfo=tz)
return int(result.timestamp())
except Exception as exc:
raise ValueError("%r is not a valid date" % date) from exc
def parse_http_date_safe(date):
"""
Same as parse_http_date, but return None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Convert a base 36 string to an int. Raise ValueError if the input won't fit
into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is longer than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
return int(s, 36)
def int_to_base36(i):
"""Convert an integer to a base36 string."""
char_set = "0123456789abcdefghijklmnopqrstuvwxyz"
if i < 0:
raise ValueError("Negative base36 conversion input.")
if i < 36:
return char_set[i]
b36 = ""
while i != 0:
i, n = divmod(i, 36)
b36 = char_set[n] + b36
return b36
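# Round-trip sketch (illustrative):
#
#   int_to_base36(36)        # -> '10'
#   base36_to_int("10")      # -> 36
#   base36_to_int("z" * 14)  # -> ValueError (longer than 13 base36 digits)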
def urlsafe_base64_encode(s):
"""
Encode a bytestring to a base64 string for use in URLs. Strip any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b"\n=").decode("ascii")
def urlsafe_base64_decode(s):
"""
Decode a base64 encoded string. Add back any trailing equal signs that
might have been stripped.
"""
s = s.encode()
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b"="))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
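# Round-trip sketch (illustrative): encoding strips the '=' padding, decoding
# adds it back before delegating to base64.
#
#   token = urlsafe_base64_encode(b"hello")  # -> 'aGVsbG8'
#   urlsafe_base64_decode(token)             # -> b'hello'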
def parse_etags(etag_str):
"""
Parse a string of ETags given in an If-None-Match or If-Match header as
defined by RFC 9110. Return a list of quoted ETags, or ['*'] if all ETags
should be matched.
"""
if etag_str.strip() == "*":
return ["*"]
else:
# Parse each ETag individually, and return any that are valid.
etag_matches = (ETAG_MATCH.match(etag.strip()) for etag in etag_str.split(","))
return [match[1] for match in etag_matches if match]
def quote_etag(etag_str):
"""
If the provided string is already a quoted ETag, return it. Otherwise, wrap
the string in quotes, making it a strong ETag.
"""
if ETAG_MATCH.match(etag_str):
return etag_str
else:
return '"%s"' % etag_str
def is_same_domain(host, pattern):
"""
Return ``True`` if the host is either an exact match or a match
to the wildcard pattern.
Any pattern beginning with a period matches a domain and all of its
subdomains. (e.g. ``.example.com`` matches ``example.com`` and
``foo.example.com``). Anything else is an exact string match.
"""
if not pattern:
return False
pattern = pattern.lower()
return (
pattern[0] == "."
and (host.endswith(pattern) or host == pattern[1:])
or pattern == host
)
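# Illustrative examples (host is assumed to already be lowercase, as when it
# comes from HttpRequest.get_host()):
#
#   is_same_domain("foo.example.com", ".example.com")  # -> True
#   is_same_domain("example.com", ".example.com")      # -> True
#   is_same_domain("badexample.com", ".example.com")   # -> False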
def url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False):
"""
Return ``True`` if the url uses an allowed host and a safe scheme.
Always return ``False`` on an empty url.
If ``require_https`` is ``True``, only 'https' will be considered a valid
scheme, as opposed to 'http' and 'https' with the default, ``False``.
Note: "True" doesn't entail that a URL is "safe". It may still be e.g.
quoted incorrectly. Ensure to also use django.utils.encoding.iri_to_uri()
on the path component of untrusted URLs.
"""
if url is not None:
url = url.strip()
if not url:
return False
if allowed_hosts is None:
allowed_hosts = set()
elif isinstance(allowed_hosts, str):
allowed_hosts = {allowed_hosts}
# Chrome treats \ completely as / in paths but it could be part of some
# basic auth credentials so we need to check both URLs.
return _url_has_allowed_host_and_scheme(
url, allowed_hosts, require_https=require_https
) and _url_has_allowed_host_and_scheme(
url.replace("\\", "/"), allowed_hosts, require_https=require_https
)
def _url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False):
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith("///"):
return False
try:
url_info = urlparse(url)
except ValueError: # e.g. invalid IPv6 addresses
return False
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
    # In that URL, example.com is not the hostname but a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == "C":
return False
scheme = url_info.scheme
# Consider URLs without a scheme (e.g. //example.com/p) to be http.
if not url_info.scheme and url_info.netloc:
scheme = "http"
valid_schemes = ["https"] if require_https else ["http", "https"]
return (not url_info.netloc or url_info.netloc in allowed_hosts) and (
not scheme or scheme in valid_schemes
)
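# Illustrative examples (hypothetical URLs, not from the test suite):
#
#   url_has_allowed_host_and_scheme("/next/", allowed_hosts=None)             # -> True
#   url_has_allowed_host_and_scheme("https://example.com/", {"example.com"})  # -> True
#   url_has_allowed_host_and_scheme("https://evil.com/", {"example.com"})     # -> False
#   url_has_allowed_host_and_scheme("///evil.com/", {"example.com"})          # -> False
#   url_has_allowed_host_and_scheme(
#       "http://example.com/", {"example.com"}, require_https=True
#   )  # -> False (only 'https' is allowed when require_https=True)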
def escape_leading_slashes(url):
"""
If redirecting to an absolute path (two leading slashes), a slash must be
escaped to prevent browsers from handling the path as schemaless and
redirecting to another host.
"""
if url.startswith("//"):
url = "/%2F{}".format(url.removeprefix("//"))
return url
def _parseparam(s):
while s[:1] == ";":
s = s[1:]
end = s.find(";")
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
end = s.find(";", end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]
def parse_header_parameters(line):
"""
Parse a Content-type like header.
Return the main content-type and a dictionary of options.
"""
parts = _parseparam(";" + line)
key = parts.__next__().lower()
pdict = {}
for p in parts:
i = p.find("=")
if i >= 0:
has_encoding = False
name = p[:i].strip().lower()
if name.endswith("*"):
# Lang/encoding embedded in the value (like "filename*=UTF-8''file.ext")
# https://tools.ietf.org/html/rfc2231#section-4
name = name[:-1]
if p.count("'") == 2:
has_encoding = True
value = p[i + 1 :].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace("\\\\", "\\").replace('\\"', '"')
if has_encoding:
encoding, lang, value = value.split("'")
value = unquote(value, encoding=encoding)
pdict[name] = value
return key, pdict
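# Illustrative examples:
#
#   parse_header_parameters('text/html; charset="utf-8"')
#   # -> ('text/html', {'charset': 'utf-8'})
#   parse_header_parameters("attachment; filename*=UTF-8''na%C3%AFve.txt")
#   # -> ('attachment', {'filename': 'naïve.txt'})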
def content_disposition_header(as_attachment, filename):
"""
Construct a Content-Disposition HTTP header value from the given filename
as specified by RFC 6266.
"""
if filename:
disposition = "attachment" if as_attachment else "inline"
try:
filename.encode("ascii")
file_expr = 'filename="{}"'.format(
filename.replace("\\", "\\\\").replace('"', r"\"")
)
except UnicodeEncodeError:
file_expr = "filename*=utf-8''{}".format(quote(filename))
return f"{disposition}; {file_expr}"
elif as_attachment:
return "attachment"
else:
return None
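# Illustrative examples (hypothetical filenames):
#
#   content_disposition_header(True, "report.pdf")
#   # -> 'attachment; filename="report.pdf"'
#   content_disposition_header(False, "résumé.pdf")
#   # -> "inline; filename*=utf-8''r%C3%A9sum%C3%A9.pdf"
#   content_disposition_header(True, "")  # -> 'attachment'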
|
aa2b4cbe4ba905b275a608d01527143e62bc0b0c0630628677729b42c5934151 | """
This module contains helper functions for controlling caching. It does so by
managing the "Vary" header of responses. It includes functions to patch the
header of response objects directly and decorators that change functions to do
that header-patching themselves.
For information on the Vary header, see RFC 9110 Section 12.5.5.
Essentially, the "Vary" HTTP header defines which headers a cache should take
into account when building its cache key. Requests with the same path but
different header content for headers named in "Vary" need to get different
cache keys to prevent delivery of wrong content.
An example: i18n middleware would need to distinguish caches by the
"Accept-language" header.
"""
import time
from collections import defaultdict
from hashlib import md5
from django.conf import settings
from django.core.cache import caches
from django.http import HttpResponse, HttpResponseNotModified
from django.utils.http import http_date, parse_etags, parse_http_date_safe, quote_etag
from django.utils.log import log_response
from django.utils.regex_helper import _lazy_re_compile
from django.utils.timezone import get_current_timezone_name
from django.utils.translation import get_language
cc_delim_re = _lazy_re_compile(r"\s*,\s*")
def patch_cache_control(response, **kwargs):
"""
Patch the Cache-Control header by adding all keyword arguments to it.
The transformation is as follows:
* All keyword parameter names are turned to lowercase, and underscores
are converted to hyphens.
* If the value of a parameter is True (exactly True, not just a
true value), only the parameter name is added to the header.
* All other parameters are added with their value, after applying
str() to it.
"""
def dictitem(s):
t = s.split("=", 1)
if len(t) > 1:
return (t[0].lower(), t[1])
else:
return (t[0].lower(), True)
def dictvalue(*t):
if t[1] is True:
return t[0]
else:
return "%s=%s" % (t[0], t[1])
cc = defaultdict(set)
if response.get("Cache-Control"):
for field in cc_delim_re.split(response.headers["Cache-Control"]):
directive, value = dictitem(field)
if directive == "no-cache":
# no-cache supports multiple field names.
cc[directive].add(value)
else:
cc[directive] = value
# If there's already a max-age header but we're being asked to set a new
# max-age, use the minimum of the two ages. In practice this happens when
# a decorator and a piece of middleware both operate on a given view.
if "max-age" in cc and "max_age" in kwargs:
kwargs["max_age"] = min(int(cc["max-age"]), kwargs["max_age"])
# Allow overriding private caching and vice versa
if "private" in cc and "public" in kwargs:
del cc["private"]
elif "public" in cc and "private" in kwargs:
del cc["public"]
for k, v in kwargs.items():
directive = k.replace("_", "-")
if directive == "no-cache":
# no-cache supports multiple field names.
cc[directive].add(v)
else:
cc[directive] = v
directives = []
for directive, values in cc.items():
if isinstance(values, set):
if True in values:
# True takes precedence.
values = {True}
directives.extend([dictvalue(directive, value) for value in values])
else:
directives.append(dictvalue(directive, values))
cc = ", ".join(directives)
response.headers["Cache-Control"] = cc
def get_max_age(response):
"""
Return the max-age from the response Cache-Control header as an integer,
or None if it wasn't found or wasn't an integer.
"""
if not response.has_header("Cache-Control"):
return
cc = dict(
_to_tuple(el) for el in cc_delim_re.split(response.headers["Cache-Control"])
)
try:
return int(cc["max-age"])
except (ValueError, TypeError, KeyError):
pass
def set_response_etag(response):
if not response.streaming and response.content:
response.headers["ETag"] = quote_etag(
md5(response.content, usedforsecurity=False).hexdigest(),
)
return response
def _precondition_failed(request):
response = HttpResponse(status=412)
log_response(
"Precondition Failed: %s",
request.path,
response=response,
request=request,
)
return response
def _not_modified(request, response=None):
new_response = HttpResponseNotModified()
if response:
# Preserve the headers required by RFC 9110 Section 15.4.5, as well as
# Last-Modified.
for header in (
"Cache-Control",
"Content-Location",
"Date",
"ETag",
"Expires",
"Last-Modified",
"Vary",
):
if header in response:
new_response.headers[header] = response.headers[header]
        # Preserve cookies as per the cookie specification: "If a proxy server
        # receives a response which contains a Set-cookie header, it should
        # propagate the Set-cookie header to the client, regardless of whether
        # the response was 304 (Not Modified) or 200 (OK)."
# https://curl.haxx.se/rfc/cookie_spec.html
new_response.cookies = response.cookies
return new_response
def get_conditional_response(request, etag=None, last_modified=None, response=None):
# Only return conditional responses on successful requests.
if response and not (200 <= response.status_code < 300):
return response
# Get HTTP request headers.
if_match_etags = parse_etags(request.META.get("HTTP_IF_MATCH", ""))
if_unmodified_since = request.META.get("HTTP_IF_UNMODIFIED_SINCE")
if_unmodified_since = if_unmodified_since and parse_http_date_safe(
if_unmodified_since
)
if_none_match_etags = parse_etags(request.META.get("HTTP_IF_NONE_MATCH", ""))
if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since)
# Evaluation of request preconditions below follows RFC 9110 Section
# 13.2.2.
# Step 1: Test the If-Match precondition.
if if_match_etags and not _if_match_passes(etag, if_match_etags):
return _precondition_failed(request)
# Step 2: Test the If-Unmodified-Since precondition.
if (
not if_match_etags
and if_unmodified_since
and not _if_unmodified_since_passes(last_modified, if_unmodified_since)
):
return _precondition_failed(request)
# Step 3: Test the If-None-Match precondition.
if if_none_match_etags and not _if_none_match_passes(etag, if_none_match_etags):
if request.method in ("GET", "HEAD"):
return _not_modified(request, response)
else:
return _precondition_failed(request)
# Step 4: Test the If-Modified-Since precondition.
if (
not if_none_match_etags
and if_modified_since
and not _if_modified_since_passes(last_modified, if_modified_since)
and request.method in ("GET", "HEAD")
):
return _not_modified(request, response)
# Step 5: Test the If-Range precondition (not supported).
# Step 6: Return original response since there isn't a conditional response.
return response
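# Usage sketch (illustrative): on a GET whose If-None-Match header matches
# the current ETag, content generation can be skipped entirely.
#
#   conditional = get_conditional_response(request, etag='"abc"')
#   # -> HttpResponseNotModified if the request sent If-None-Match: "abc"
#   # -> None (i.e. build the full response) if no precondition applied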
def _if_match_passes(target_etag, etags):
"""
Test the If-Match comparison as defined in RFC 9110 Section 13.1.1.
"""
if not target_etag:
# If there isn't an ETag, then there can't be a match.
return False
elif etags == ["*"]:
# The existence of an ETag means that there is "a current
# representation for the target resource", even if the ETag is weak,
# so there is a match to '*'.
return True
elif target_etag.startswith("W/"):
# A weak ETag can never strongly match another ETag.
return False
else:
# Since the ETag is strong, this will only return True if there's a
# strong match.
return target_etag in etags
def _if_unmodified_since_passes(last_modified, if_unmodified_since):
"""
Test the If-Unmodified-Since comparison as defined in RFC 9110 Section
13.1.4.
"""
return last_modified and last_modified <= if_unmodified_since
def _if_none_match_passes(target_etag, etags):
"""
Test the If-None-Match comparison as defined in RFC 9110 Section 13.1.2.
"""
if not target_etag:
# If there isn't an ETag, then there isn't a match.
return True
elif etags == ["*"]:
# The existence of an ETag means that there is "a current
# representation for the target resource", so there is a match to '*'.
return False
else:
# The comparison should be weak, so look for a match after stripping
# off any weak indicators.
target_etag = target_etag.strip("W/")
etags = (etag.strip("W/") for etag in etags)
return target_etag not in etags
def _if_modified_since_passes(last_modified, if_modified_since):
"""
Test the If-Modified-Since comparison as defined in RFC 9110 Section
13.1.3.
"""
return not last_modified or last_modified > if_modified_since
def patch_response_headers(response, cache_timeout=None):
"""
Add HTTP caching headers to the given HttpResponse: Expires and
Cache-Control.
Each header is only added if it isn't already set.
cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
by default.
"""
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
if cache_timeout < 0:
cache_timeout = 0 # Can't have max-age negative
if not response.has_header("Expires"):
response.headers["Expires"] = http_date(time.time() + cache_timeout)
patch_cache_control(response, max_age=cache_timeout)
def add_never_cache_headers(response):
"""
Add headers to a response to indicate that a page should never be cached.
"""
patch_response_headers(response, cache_timeout=-1)
patch_cache_control(
response, no_cache=True, no_store=True, must_revalidate=True, private=True
)
def patch_vary_headers(response, newheaders):
"""
Add (or update) the "Vary" header in the given HttpResponse object.
    newheaders is a list of header names that should be in "Vary". If
    newheaders contains an asterisk, the "Vary" header will consist of a single
    asterisk '*'. Otherwise, existing headers in "Vary" aren't removed.
"""
# Note that we need to keep the original order intact, because cache
# implementations may rely on the order of the Vary contents in, say,
# computing an MD5 hash.
if response.has_header("Vary"):
vary_headers = cc_delim_re.split(response.headers["Vary"])
else:
vary_headers = []
# Use .lower() here so we treat headers as case-insensitive.
existing_headers = {header.lower() for header in vary_headers}
additional_headers = [
newheader
for newheader in newheaders
if newheader.lower() not in existing_headers
]
vary_headers += additional_headers
if "*" in vary_headers:
response.headers["Vary"] = "*"
else:
response.headers["Vary"] = ", ".join(vary_headers)
def has_vary_header(response, header_query):
"""
Check to see if the response has a given header name in its Vary header.
"""
if not response.has_header("Vary"):
return False
vary_headers = cc_delim_re.split(response.headers["Vary"])
existing_headers = {header.lower() for header in vary_headers}
return header_query.lower() in existing_headers
def _i18n_cache_key_suffix(request, cache_key):
"""If necessary, add the current locale or time zone to the cache key."""
if settings.USE_I18N:
        # First check if LocaleMiddleware or another middleware added
        # LANGUAGE_CODE to the request, then fall back to the active language,
        # which in turn can also fall back to settings.LANGUAGE_CODE.
cache_key += ".%s" % getattr(request, "LANGUAGE_CODE", get_language())
if settings.USE_TZ:
cache_key += ".%s" % get_current_timezone_name()
return cache_key
def _generate_cache_key(request, method, headerlist, key_prefix):
"""Return a cache key from the headers given in the header list."""
ctx = md5(usedforsecurity=False)
for header in headerlist:
value = request.META.get(header)
if value is not None:
ctx.update(value.encode())
url = md5(request.build_absolute_uri().encode("ascii"), usedforsecurity=False)
cache_key = "views.decorators.cache.cache_page.%s.%s.%s.%s" % (
key_prefix,
method,
url.hexdigest(),
ctx.hexdigest(),
)
return _i18n_cache_key_suffix(request, cache_key)
def _generate_cache_header_key(key_prefix, request):
"""Return a cache key for the header cache."""
url = md5(request.build_absolute_uri().encode("ascii"), usedforsecurity=False)
cache_key = "views.decorators.cache.cache_header.%s.%s" % (
key_prefix,
url.hexdigest(),
)
return _i18n_cache_key_suffix(request, cache_key)
def get_cache_key(request, key_prefix=None, method="GET", cache=None):
"""
Return a cache key based on the request URL and query. It can be used
in the request phase because it pulls the list of headers to take into
account from the global URL registry and uses those to build a cache key
to check against.
If there isn't a headerlist stored, return None, indicating that the page
needs to be rebuilt.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
headerlist = cache.get(cache_key)
if headerlist is not None:
return _generate_cache_key(request, method, headerlist, key_prefix)
else:
return None
def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
"""
Learn what headers to take into account for some request URL from the
response object. Store those headers in a global URL registry so that
later access to that URL will know what headers to take into account
without building the response object itself. The headers are named in the
Vary header of the response, but we want to prevent response generation.
The list of headers to use for cache key generation is stored in the same
cache as the pages themselves. If the cache ages some data out of the
cache, this just means that we have to build the response once to get at
the Vary header and so at the list of headers to use for the cache key.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
if response.has_header("Vary"):
is_accept_language_redundant = settings.USE_I18N
# If i18n is used, the generated cache key will be suffixed with the
# current locale. Adding the raw value of Accept-Language is redundant
# in that case and would result in storing the same content under
# multiple keys in the cache. See #18191 for details.
headerlist = []
for header in cc_delim_re.split(response.headers["Vary"]):
header = header.upper().replace("-", "_")
if header != "ACCEPT_LANGUAGE" or not is_accept_language_redundant:
headerlist.append("HTTP_" + header)
headerlist.sort()
cache.set(cache_key, headerlist, cache_timeout)
return _generate_cache_key(request, request.method, headerlist, key_prefix)
else:
        # If there is no Vary header, we still need a cache key for
        # request.build_absolute_uri().
cache.set(cache_key, [], cache_timeout)
return _generate_cache_key(request, request.method, [], key_prefix)
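# Flow sketch (illustrative) of how the cache middleware pairs these helpers:
# the response phase records which headers matter, and the request phase
# rebuilds the same key without touching the view.
#
#   # Response phase (UpdateCacheMiddleware):
#   cache_key = learn_cache_key(request, response)
#   cache.set(cache_key, response, timeout)
#
#   # Later request phase (FetchFromCacheMiddleware):
#   cache_key = get_cache_key(request)  # None until a response was learned
#   cached = cache.get(cache_key) if cache_key else None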
def _to_tuple(s):
t = s.split("=", 1)
if len(t) == 2:
return t[0].lower(), t[1]
return t[0].lower(), True
|
1663d48f775f2f68021f68a3716b5c7ac60a6ec8b6f846c718b1b21ef18e240d | import datetime
import decimal
import functools
import re
import unicodedata
from importlib import import_module
from django.conf import settings
from django.utils import dateformat, numberformat
from django.utils.functional import lazy
from django.utils.translation import check_for_language, get_language, to_locale
# format_cache is a mapping from (format_type, lang) to the format string.
# By using the cache, it is possible to avoid running get_format_modules
# repeatedly.
_format_cache = {}
_format_modules_cache = {}
ISO_INPUT_FORMATS = {
"DATE_INPUT_FORMATS": ["%Y-%m-%d"],
"TIME_INPUT_FORMATS": ["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"],
"DATETIME_INPUT_FORMATS": [
"%Y-%m-%d %H:%M:%S",
"%Y-%m-%d %H:%M:%S.%f",
"%Y-%m-%d %H:%M",
"%Y-%m-%d",
],
}
FORMAT_SETTINGS = frozenset(
[
"DECIMAL_SEPARATOR",
"THOUSAND_SEPARATOR",
"NUMBER_GROUPING",
"FIRST_DAY_OF_WEEK",
"MONTH_DAY_FORMAT",
"TIME_FORMAT",
"DATE_FORMAT",
"DATETIME_FORMAT",
"SHORT_DATE_FORMAT",
"SHORT_DATETIME_FORMAT",
"YEAR_MONTH_FORMAT",
"DATE_INPUT_FORMATS",
"TIME_INPUT_FORMATS",
"DATETIME_INPUT_FORMATS",
]
)
def reset_format_cache():
"""Clear any cached formats.
This method is provided primarily for testing purposes,
so that the effects of cached formats can be removed.
"""
global _format_cache, _format_modules_cache
_format_cache = {}
_format_modules_cache = {}
def iter_format_modules(lang, format_module_path=None):
"""Find format modules."""
if not check_for_language(lang):
return
if format_module_path is None:
format_module_path = settings.FORMAT_MODULE_PATH
format_locations = []
if format_module_path:
if isinstance(format_module_path, str):
format_module_path = [format_module_path]
for path in format_module_path:
format_locations.append(path + ".%s")
format_locations.append("django.conf.locale.%s")
locale = to_locale(lang)
locales = [locale]
if "_" in locale:
locales.append(locale.split("_")[0])
for location in format_locations:
for loc in locales:
try:
yield import_module("%s.formats" % (location % loc))
except ImportError:
pass
def get_format_modules(lang=None):
"""Return a list of the format modules found."""
if lang is None:
lang = get_language()
if lang not in _format_modules_cache:
_format_modules_cache[lang] = list(
iter_format_modules(lang, settings.FORMAT_MODULE_PATH)
)
return _format_modules_cache[lang]
def get_format(format_type, lang=None, use_l10n=None):
"""
For a specific format type, return the format for the current
language (locale). Default to the format in the settings.
format_type is the name of the format, e.g. 'DATE_FORMAT'.
If use_l10n is provided and is not None, it forces the value to
be localized (or not), otherwise it's always localized.
"""
if use_l10n is None:
use_l10n = True
if use_l10n and lang is None:
lang = get_language()
format_type = str(format_type) # format_type may be lazy.
cache_key = (format_type, lang)
try:
return _format_cache[cache_key]
except KeyError:
pass
# The requested format_type has not been cached yet. Try to find it in any
# of the format_modules for the given lang if l10n is enabled. If it's not
# there or if l10n is disabled, fall back to the project settings.
val = None
if use_l10n:
for module in get_format_modules(lang):
val = getattr(module, format_type, None)
if val is not None:
break
if val is None:
if format_type not in FORMAT_SETTINGS:
return format_type
val = getattr(settings, format_type)
elif format_type in ISO_INPUT_FORMATS:
# If a list of input formats from one of the format_modules was
# retrieved, make sure the ISO_INPUT_FORMATS are in this list.
val = list(val)
for iso_input in ISO_INPUT_FORMATS.get(format_type, ()):
if iso_input not in val:
val.append(iso_input)
_format_cache[cache_key] = val
return val
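# Illustrative examples (actual values depend on settings and locale data):
#
#   get_format("DATE_FORMAT")             # e.g. 'N j, Y' for en-us
#   get_format("DATE_FORMAT", lang="de")  # e.g. 'j. F Y'
#   get_format("NOT_A_FORMAT")            # -> 'NOT_A_FORMAT' (echoed back)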
get_format_lazy = lazy(get_format, str, list, tuple)
def date_format(value, format=None, use_l10n=None):
"""
Format a datetime.date or datetime.datetime object using a
localizable format.
If use_l10n is provided and is not None, that will force the value to
be localized (or not), otherwise it's always localized.
"""
return dateformat.format(
value, get_format(format or "DATE_FORMAT", use_l10n=use_l10n)
)
def time_format(value, format=None, use_l10n=None):
"""
Format a datetime.time object using a localizable format.
If use_l10n is provided and is not None, it forces the value to
be localized (or not), otherwise it's always localized.
"""
return dateformat.time_format(
value, get_format(format or "TIME_FORMAT", use_l10n=use_l10n)
)
def number_format(value, decimal_pos=None, use_l10n=None, force_grouping=False):
"""
Format a numeric value using localization settings.
If use_l10n is provided and is not None, it forces the value to
be localized (or not), otherwise it's always localized.
"""
if use_l10n is None:
use_l10n = True
lang = get_language() if use_l10n else None
return numberformat.format(
value,
get_format("DECIMAL_SEPARATOR", lang, use_l10n=use_l10n),
decimal_pos,
get_format("NUMBER_GROUPING", lang, use_l10n=use_l10n),
get_format("THOUSAND_SEPARATOR", lang, use_l10n=use_l10n),
force_grouping=force_grouping,
use_l10n=use_l10n,
)
def localize(value, use_l10n=None):
"""
Check if value is a localizable type (date, number...) and return it
formatted as a string using current locale format.
If use_l10n is provided and is not None, it forces the value to
be localized (or not), otherwise it's always localized.
"""
if isinstance(value, str): # Handle strings first for performance reasons.
return value
elif isinstance(value, bool): # Make sure booleans don't get treated as numbers
return str(value)
elif isinstance(value, (decimal.Decimal, float, int)):
if use_l10n is False:
return str(value)
return number_format(value, use_l10n=use_l10n)
elif isinstance(value, datetime.datetime):
return date_format(value, "DATETIME_FORMAT", use_l10n=use_l10n)
elif isinstance(value, datetime.date):
return date_format(value, use_l10n=use_l10n)
elif isinstance(value, datetime.time):
return time_format(value, use_l10n=use_l10n)
return value
def localize_input(value, default=None):
"""
Check if an input value is a localizable type and return it
formatted with the appropriate formatting string of the current locale.
"""
if isinstance(value, str): # Handle strings first for performance reasons.
return value
elif isinstance(value, bool): # Don't treat booleans as numbers.
return str(value)
elif isinstance(value, (decimal.Decimal, float, int)):
return number_format(value)
elif isinstance(value, datetime.datetime):
format = default or get_format("DATETIME_INPUT_FORMATS")[0]
format = sanitize_strftime_format(format)
return value.strftime(format)
elif isinstance(value, datetime.date):
format = default or get_format("DATE_INPUT_FORMATS")[0]
format = sanitize_strftime_format(format)
return value.strftime(format)
elif isinstance(value, datetime.time):
format = default or get_format("TIME_INPUT_FORMATS")[0]
return value.strftime(format)
return value
@functools.lru_cache
def sanitize_strftime_format(fmt):
"""
Ensure that certain specifiers are correctly padded with leading zeros.
For years < 1000 specifiers %C, %F, %G, and %Y don't work as expected for
strftime provided by glibc on Linux as they don't pad the year or century
with leading zeros. Support for specifying the padding explicitly is
available, however, which can be used to fix this issue.
FreeBSD, macOS, and Windows do not support explicitly specifying the
padding, but return four digit years (with leading zeros) as expected.
This function checks whether the %Y produces a correctly padded string and,
if not, makes the following substitutions:
- %C → %02C
- %F → %010F
- %G → %04G
- %Y → %04Y
See https://bugs.python.org/issue13305 for more details.
"""
if datetime.date(1, 1, 1).strftime("%Y") == "0001":
return fmt
mapping = {"C": 2, "F": 10, "G": 4, "Y": 4}
return re.sub(
r"((?:^|[^%])(?:%%)*)%([CFGY])",
lambda m: r"%s%%0%s%s" % (m[1], mapping[m[2]], m[2]),
fmt,
)
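# Illustrative example: on glibc-based systems, where %Y does not zero-pad
# years below 1000, the specifier is rewritten; elsewhere the format is
# returned unchanged.
#
#   sanitize_strftime_format("%Y-%m-%d")  # -> '%04Y-%m-%d' on glibc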
def sanitize_separators(value):
"""
Sanitize a value according to the current decimal and
thousand separator setting. Used with form field input.
"""
if isinstance(value, str):
parts = []
decimal_separator = get_format("DECIMAL_SEPARATOR")
if decimal_separator in value:
value, decimals = value.split(decimal_separator, 1)
parts.append(decimals)
if settings.USE_THOUSAND_SEPARATOR:
thousand_sep = get_format("THOUSAND_SEPARATOR")
if (
thousand_sep == "."
and value.count(".") == 1
and len(value.split(".")[-1]) != 3
):
# Special case where we suspect a dot meant decimal separator
# (see #22171).
pass
else:
for replacement in {
thousand_sep,
unicodedata.normalize("NFKD", thousand_sep),
}:
value = value.replace(replacement, "")
parts.append(value)
value = ".".join(reversed(parts))
return value
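# Illustrative examples, assuming a locale with ',' as the decimal separator,
# '.' as the thousand separator, and USE_THOUSAND_SEPARATOR = True:
#
#   sanitize_separators("1.234,56")  # -> '1234.56'
#   sanitize_separators("1.23")      # -> '1.23' (dot suspected to be a
#                                    #    decimal separator, see #22171)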
|
b66c47d004c71e0ee1cc19ef29bf2495ad046299897fe4ee07cfc201e5708ee8 | """Default tags used by the template system, available to all templates."""
import re
import sys
import warnings
from collections import namedtuple
from datetime import datetime
from itertools import cycle as itertools_cycle
from itertools import groupby
from django.conf import settings
from django.utils import timezone
from django.utils.html import conditional_escape, escape, format_html
from django.utils.lorem_ipsum import paragraphs, words
from django.utils.safestring import mark_safe
from .base import (
BLOCK_TAG_END,
BLOCK_TAG_START,
COMMENT_TAG_END,
COMMENT_TAG_START,
FILTER_SEPARATOR,
SINGLE_BRACE_END,
SINGLE_BRACE_START,
VARIABLE_ATTRIBUTE_SEPARATOR,
VARIABLE_TAG_END,
VARIABLE_TAG_START,
Node,
NodeList,
TemplateSyntaxError,
VariableDoesNotExist,
kwarg_re,
render_value_in_context,
token_kwargs,
)
from .context import Context
from .defaultfilters import date
from .library import Library
from .smartif import IfParser, Literal
register = Library()
class AutoEscapeControlNode(Node):
"""Implement the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
child_nodelists = ()
def render(self, context):
return ""
class CsrfTokenNode(Node):
child_nodelists = ()
def render(self, context):
csrf_token = context.get("csrf_token")
if csrf_token:
if csrf_token == "NOTPROVIDED":
return format_html("")
else:
return format_html(
'<input type="hidden" name="csrfmiddlewaretoken" value="{}">',
csrf_token,
)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn(
"A {% csrf_token %} was used in a template, but the context "
"did not provide the value. This is usually caused by not "
"using RequestContext."
)
return ""
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context.set_upward(self.variable_name, value)
if self.silent:
return ""
return render_value_in_context(value, context)
def reset(self, context):
"""
Reset the cycle iteration back to the beginning.
"""
context.render_context[self] = itertools_cycle(self.cyclevars)
class DebugNode(Node):
def render(self, context):
if not settings.DEBUG:
return ""
from pprint import pformat
output = [escape(pformat(val)) for val in context]
output.append("\n\n")
output.append(escape(pformat(sys.modules)))
return "".join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
with context.push(var=output):
return self.filter_expr.resolve(context)
class FirstOfNode(Node):
def __init__(self, variables, asvar=None):
self.vars = variables
self.asvar = asvar
def render(self, context):
first = ""
for var in self.vars:
value = var.resolve(context, ignore_failures=True)
if value:
first = render_value_in_context(value, context)
break
if self.asvar:
context[self.asvar] = first
return ""
return first
class ForNode(Node):
child_nodelists = ("nodelist_loop", "nodelist_empty")
def __init__(
self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None
):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = " reversed" if self.is_reversed else ""
return "<%s: for %s in %s, tail_len: %d%s>" % (
self.__class__.__name__,
", ".join(self.loopvars),
self.sequence,
len(self.nodelist_loop),
reversed_text,
)
def render(self, context):
if "forloop" in context:
parentloop = context["forloop"]
else:
parentloop = {}
with context.push():
values = self.sequence.resolve(context, ignore_failures=True)
if values is None:
values = []
if not hasattr(values, "__len__"):
values = list(values)
len_values = len(values)
if len_values < 1:
return self.nodelist_empty.render(context)
nodelist = []
if self.is_reversed:
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context["forloop"] = {"parentloop": parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict["counter0"] = i
loop_dict["counter"] = i + 1
# Reverse counter iteration numbers.
loop_dict["revcounter"] = len_values - i
loop_dict["revcounter0"] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict["first"] = i == 0
loop_dict["last"] = i == len_values - 1
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
try:
len_item = len(item)
except TypeError: # not an iterable
len_item = 1
# Check loop variable count before unpacking
if num_loopvars != len_item:
raise ValueError(
"Need {} values to unpack in for loop; got {}. ".format(
num_loopvars, len_item
),
)
unpacked_vars = dict(zip(self.loopvars, item))
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
for node in self.nodelist_loop:
nodelist.append(node.render_annotated(context))
if pop_context:
# Pop the loop variables pushed on to the context to avoid
# the context ending up in an inconsistent state when other
# tags (e.g., include and with) push data to context.
context.pop()
return mark_safe("".join(nodelist))
class IfChangedNode(Node):
child_nodelists = ("nodelist_true", "nodelist_false")
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
state_frame.setdefault(self)
nodelist_true_output = None
if self._varlist:
# Consider multiple parameters. This behaves like an OR evaluation
# of the multiple variables.
compare_to = [
var.resolve(context, ignore_failures=True) for var in self._varlist
]
else:
# The "{% ifchanged %}" syntax (without any variables) compares
# the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
if compare_to != state_frame[self]:
state_frame[self] = compare_to
# render true block if not already rendered
return nodelist_true_output or self.nodelist_true.render(context)
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ""
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can
# create a new scope. Find the place where to store the state to detect
# changes.
if "forloop" in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context["forloop"]
else:
# Using ifchanged outside loops. Effectively this is a no-op
# because the state is associated with 'self'.
return context.render_context
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return "<%s>" % self.__class__.__name__
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
yield from nodelist
@property
def nodelist(self):
return NodeList(self)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ""
class LoremNode(Node):
def __init__(self, count, method, common):
self.count, self.method, self.common = count, method, common
def render(self, context):
try:
count = int(self.count.resolve(context))
except (ValueError, TypeError):
count = 1
if self.method == "w":
return words(count, common=self.common)
else:
paras = paragraphs(count, common=self.common)
if self.method == "p":
paras = ["<p>%s</p>" % p for p in paras]
return "\n\n".join(paras)
GroupedResult = namedtuple("GroupedResult", ["grouper", "list"])
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, ignore_failures=True)
def render(self, context):
obj_list = self.target.resolve(context, ignore_failures=True)
if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ""
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
GroupedResult(grouper=key, list=list(val))
for key, val in groupby(
obj_list, lambda obj: self.resolve_expression(obj, context)
)
]
return ""
class LoadNode(Node):
child_nodelists = ()
def render(self, context):
return ""
class NowNode(Node):
def __init__(self, format_string, asvar=None):
self.format_string = format_string
self.asvar = asvar
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
formatted = date(datetime.now(tz=tzinfo), self.format_string)
if self.asvar:
context[self.asvar] = formatted
return ""
else:
return formatted
class ResetCycleNode(Node):
def __init__(self, node):
self.node = node
def render(self, context):
self.node.reset(context)
return ""
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
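# Rendering sketch (illustrative): {% spaceless %} removes whitespace between
# tags only, not whitespace inside text nodes.
#
#   "<p>\n  <a>Foo</a>\n</p>"  ->  "<p><a>Foo</a></p>"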
class TemplateTagNode(Node):
mapping = {
"openblock": BLOCK_TAG_START,
"closeblock": BLOCK_TAG_END,
"openvariable": VARIABLE_TAG_START,
"closevariable": VARIABLE_TAG_END,
"openbrace": SINGLE_BRACE_START,
"closebrace": SINGLE_BRACE_END,
"opencomment": COMMENT_TAG_START,
"closecomment": COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, "")
class URLNode(Node):
child_nodelists = ()
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def __repr__(self):
return "<%s view_name='%s' args=%s kwargs=%s as=%s>" % (
self.__class__.__qualname__,
self.view_name,
repr(self.args),
repr(self.kwargs),
repr(self.asvar),
)
def render(self, context):
from django.urls import NoReverseMatch, reverse
args = [arg.resolve(context) for arg in self.args]
kwargs = {k: v.resolve(context) for k, v in self.kwargs.items()}
view_name = self.view_name.resolve(context)
try:
current_app = context.request.current_app
except AttributeError:
try:
current_app = context.request.resolver_match.namespace
except AttributeError:
current_app = None
# Try to look up the URL. If it fails, raise NoReverseMatch unless the
# {% url ... as var %} construct is used, in which case return nothing.
url = ""
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=current_app)
except NoReverseMatch:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ""
else:
if context.autoescape:
url = conditional_escape(url)
return url
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width, asvar=None):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
self.asvar = asvar
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ""
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
result = str(round(ratio))
except ZeroDivisionError:
result = "0"
except (ValueError, TypeError, OverflowError):
result = ""
if self.asvar:
context[self.asvar] = result
return ""
else:
return result
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return "<%s>" % self.__class__.__name__
def render(self, context):
values = {key: val.resolve(context) for key, val in self.extra_context.items()}
with context.push(**values):
return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
"""
# token.split_contents() isn't useful here because this tag doesn't accept
# variable as arguments.
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ("on", "off"):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(("endautoescape",))
parser.delete_first_token()
return AutoEscapeControlNode((arg == "on"), nodelist)
@register.tag
def comment(parser, token):
"""
Ignore everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past("endcomment")
return CommentNode()
@register.tag
def cycle(parser, token):
"""
Cycle among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
    # *all* templates).
#
# It keeps the last node in the parser to be able to reset it with
# {% resetcycle %}.
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, "_named_cycle_nodes"):
raise TemplateSyntaxError(
"No named cycles in template. '%s' is not defined" % name
)
if name not in parser._named_cycle_nodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._named_cycle_nodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError(
"Only 'silent' flag is allowed after cycle's name, not '%s'."
% args[-1]
)
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent)
if not hasattr(parser, "_named_cycle_nodes"):
parser._named_cycle_nodes = {}
parser._named_cycle_nodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values)
parser._last_cycle_node = node
return node
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Output a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag("filter")
def do_filter(parser, token):
"""
Filter the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
# token.split_contents() isn't useful here because this tag doesn't accept
# variable as arguments.
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
for func, unused in filter_expr.filters:
filter_name = getattr(func, "_filter_name", None)
if filter_name in ("escape", "safe"):
raise TemplateSyntaxError(
'"filter %s" is not permitted. Use the "autoescape" tag instead.'
% filter_name
)
nodelist = parser.parse(("endfilter",))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token):
"""
Output the first variable passed that is not False.
Output nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 as myvar %}
This is equivalent to::
{% if var1 %}
{{ var1 }}
{% elif var2 %}
{{ var2 }}
{% elif var3 %}
{{ var3 }}
{% endif %}
but much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
        {% endautoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
bits = token.split_contents()[1:]
asvar = None
if not bits:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
if len(bits) >= 2 and bits[-2] == "as":
asvar = bits[-1]
bits = bits[:-2]
return FirstOfNode([parser.compile_filter(bit) for bit in bits], asvar)
@register.tag("for")
def do_for(parser, token):
"""
Loop over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
<ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
{% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError(
"'for' statements should have at least four words: %s" % token.contents
)
is_reversed = bits[-1] == "reversed"
in_index = -3 if is_reversed else -2
if bits[in_index] != "in":
raise TemplateSyntaxError(
"'for' statements should use the format"
" 'for x in y': %s" % token.contents
)
invalid_chars = frozenset((" ", '"', "'", FILTER_SEPARATOR))
loopvars = re.split(r" *, *", " ".join(bits[1:in_index]))
for var in loopvars:
if not var or not invalid_chars.isdisjoint(var):
raise TemplateSyntaxError(
"'for' tag received an invalid argument: %s" % token.contents
)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(
(
"empty",
"endfor",
)
)
token = parser.next_token()
if token.contents == "empty":
nodelist_empty = parser.parse(("endfor",))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super().__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag("if")
def do_if(parser, token):
"""
Evaluate a variable, and if that variable is "true" (i.e., exists, is not
empty, and is not a false boolean value), output the contents of the block:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
    The ``if`` tag may take one or several ``{% elif %}`` clauses, as well as
an ``{% else %}`` clause that will be displayed if all previous conditions
fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
    All supported operators are: ``or``, ``and``, ``in``, ``not in``,
``==``, ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(("elif", "else", "endif"))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith("elif"):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(("elif", "else", "endif"))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == "else":
nodelist = parser.parse(("endif",))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
if token.contents != "endif":
raise TemplateSyntaxError(
'Malformed template tag at line {}: "{}"'.format(
token.lineno, token.contents
)
)
return IfNode(conditions_nodelists)
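# Illustrative sketch (not part of the original module): exercising {% if %}
# through the public template API. Calling this helper assumes configured
# Django settings (e.g. settings.configure(TEMPLATES=[...])).
def _demo_if_tag():
    from django.template import Context, Template

    template = Template(
        "{% if x > 1 %}big{% elif x %}small{% else %}none{% endif %}"
    )
    # Comparison operators and elif/else branches resolve as documented above.
    assert template.render(Context({"x": 2})) == "big"
    assert template.render(Context({"x": 1})) == "small"
    assert template.render(Context({"x": 0})) == "none"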
@register.tag
def ifchanged(parser, token):
"""
Check if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
    1. Check its own rendered contents against its previous state and only
       display the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(("else", "endifchanged"))
token = parser.next_token()
if token.contents == "else":
nodelist_false = parser.parse(("endifchanged",))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
def find_library(parser, name):
try:
return parser.libraries[name]
except KeyError:
raise TemplateSyntaxError(
"'%s' is not a registered tag library. Must be one of:\n%s"
% (
name,
"\n".join(sorted(parser.libraries)),
),
)
def load_from_library(library, label, names):
"""
Return a subset of tags and filters from a library.
"""
subset = Library()
for name in names:
found = False
if name in library.tags:
found = True
subset.tags[name] = library.tags[name]
if name in library.filters:
found = True
subset.filters[name] = library.filters[name]
if found is False:
raise TemplateSyntaxError(
"'%s' is not a valid tag or filter in tag library '%s'"
% (
name,
label,
),
)
return subset
@register.tag
def load(parser, token):
"""
Load a custom template tag library into the parser.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
    # token.split_contents() isn't useful here because this tag doesn't accept
    # variables as arguments.
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
# from syntax is used; load individual tags from the library
name = bits[-1]
lib = find_library(parser, name)
subset = load_from_library(lib, name, bits[1:-2])
parser.add_library(subset)
else:
# one or more libraries are specified; load and add them to the parser
for name in bits[1:]:
lib = find_library(parser, name)
parser.add_library(lib)
return LoadNode()
@register.tag
def lorem(parser, token):
"""
Create random Latin text useful for providing test data in templates.
Usage format::
{% lorem [count] [method] [random] %}
``count`` is a number (or variable) containing the number of paragraphs or
words to generate (default is 1).
``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
plain-text paragraph blocks (default is ``b``).
    ``random`` is the word ``random``, which, if given, does not use the common
paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
Examples:
* ``{% lorem %}`` outputs the common "lorem ipsum" paragraph
* ``{% lorem 3 p %}`` outputs the common "lorem ipsum" paragraph
and two random paragraphs each wrapped in HTML ``<p>`` tags
      * ``{% lorem 2 w random %}`` outputs two random Latin words
"""
bits = list(token.split_contents())
tagname = bits[0]
# Random bit
common = bits[-1] != "random"
if not common:
bits.pop()
# Method bit
if bits[-1] in ("w", "p", "b"):
method = bits.pop()
else:
method = "b"
# Count bit
if len(bits) > 1:
count = bits.pop()
else:
count = "1"
count = parser.compile_filter(count)
if len(bits) != 1:
raise TemplateSyntaxError("Incorrect format for %r tag" % tagname)
return LoremNode(count, method, common)
@register.tag
def now(parser, token):
"""
Display the date, formatted according to the given string.
Use the same format as PHP's ``date()`` function; see https://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
asvar = None
if len(bits) == 4 and bits[-2] == "as":
asvar = bits[-1]
bits = bits[:-2]
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string, asvar)
@register.tag
def regroup(parser, token):
"""
Regroup a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``musicians`` is a list of ``Musician`` objects that have ``name`` and
``instrument`` attributes, and you'd like to display a list that
looks like:
* Guitar:
* Django Reinhardt
* Emily Remler
* Piano:
* Lovie Austin
* Bud Powell
* Trumpet:
* Duke Ellington
The following snippet of template code would accomplish this dubious task::
{% regroup musicians by instrument as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for musician in group.list %}
<li>{{ musician.name }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Guitar``, ``Piano``
and ``Trumpet``, and ``list`` is the list of musicians who play this
instrument.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
musicians was not sorted by instrument, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup musicians|dictsort:"instrument" by instrument as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != "by":
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != "as":
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(
var_name + VARIABLE_ATTRIBUTE_SEPARATOR + bits[3]
)
return RegroupNode(target, expression, var_name)
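# Illustrative sketch (not part of the original module): regrouping a list
# that is already sorted by the grouping key, as the docstring requires.
# Calling this helper assumes configured Django settings.
def _demo_regroup_tag():
    from django.template import Context, Template

    musicians = [
        {"name": "Django Reinhardt", "instrument": "Guitar"},
        {"name": "Emily Remler", "instrument": "Guitar"},
        {"name": "Lovie Austin", "instrument": "Piano"},
    ]
    template = Template(
        "{% regroup musicians by instrument as grouped %}"
        "{% for group in grouped %}{{ group.grouper }}:{{ group.list|length }};"
        "{% endfor %}"
    )
    assert template.render(Context({"musicians": musicians})) == "Guitar:2;Piano:1;"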
@register.tag
def resetcycle(parser, token):
"""
Reset a cycle tag.
If an argument is given, reset the last rendered cycle tag whose name
matches the argument, else reset the last rendered cycle tag (named or
unnamed).
"""
args = token.split_contents()
if len(args) > 2:
raise TemplateSyntaxError("%r tag accepts at most one argument." % args[0])
if len(args) == 2:
name = args[1]
try:
return ResetCycleNode(parser._named_cycle_nodes[name])
except (AttributeError, KeyError):
raise TemplateSyntaxError("Named cycle '%s' does not exist." % name)
try:
return ResetCycleNode(parser._last_cycle_node)
except AttributeError:
raise TemplateSyntaxError("No cycles in template.")
@register.tag
def spaceless(parser, token):
"""
Remove whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example returns this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` isn't stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(("endspaceless",))
parser.delete_first_token()
return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
"""
Output one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
    # token.split_contents() isn't useful here because this tag doesn't accept
    # variables as arguments.
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError(
"Invalid templatetag argument: '%s'."
" Must be one of: %s" % (tag, list(TemplateTagNode.mapping))
)
return TemplateTagNode(tag)
@register.tag
def url(parser, token):
r"""
Return an absolute URL matching the given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "url_name" arg1 arg2 %}
or
{% url "url_name" name1=value1 name2=value2 %}
The first argument is a URL pattern name. Other arguments are
space-separated values that will be filled in place of positional and
keyword arguments in the URL. Don't mix positional and keyword arguments.
All arguments for the URL must be present.
For example, if you have a view ``app_name.views.client_details`` taking
the client's id and the corresponding line in a URLconf looks like this::
path('client/<int:id>/', views.client_details, name='client-detail-view')
and this app's URLconf is included into the project's URLconf under some
path::
path('clients/', include('app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "client-detail-view" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument may also be the name of a template variable that will be
evaluated to obtain the view name or the URL name, e.g.::
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError(
"'%s' takes at least one argument, a URL pattern name." % bits[0]
)
viewname = parser.compile_filter(bits[1])
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == "as":
asvar = bits[-1]
bits = bits[:-2]
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
"""
Stop the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(("endverbatim",))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such. Calculate the ratio of a given value to a
maximum value, and then apply that ratio to a constant.
For example::
<img src="bar.png" alt="Bar"
height="10" width="{% widthratio this_value max_value max_width %}">
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
    (because 175/200 = .875; .875 * 100 = 87.5, which is rounded up to 88).
In some cases you might want to capture the result of widthratio in a
variable. It can be useful for instance in a blocktranslate like this::
{% widthratio this_value max_value max_width as width %}
{% blocktranslate %}The width is: {{ width }}{% endblocktranslate %}
"""
bits = token.split_contents()
if len(bits) == 4:
tag, this_value_expr, max_value_expr, max_width = bits
asvar = None
elif len(bits) == 6:
tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
if as_ != "as":
raise TemplateSyntaxError(
"Invalid syntax in widthratio tag. Expecting 'as' keyword"
)
else:
raise TemplateSyntaxError("widthratio takes at least three arguments")
return WidthRatioNode(
parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width),
asvar=asvar,
)
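# Illustrative sketch (not part of the original module): the ratio arithmetic
# from the docstring (175/200 * 100 -> 87.5, rounded to 88). Calling this
# helper assumes configured Django settings.
def _demo_widthratio_tag():
    from django.template import Context, Template

    rendered = Template("{% widthratio this max 100 %}").render(
        Context({"this": 175, "max": 200})
    )
    assert rendered == "88"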
@register.tag("with")
def do_with(parser, token):
"""
Add one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError(
"%r expected at least one variable assignment" % bits[0]
)
if remaining_bits:
raise TemplateSyntaxError(
"%r received an invalid token: %r" % (bits[0], remaining_bits[0])
)
nodelist = parser.parse(("endwith",))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
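# Illustrative sketch (not part of the original module): the keyword and the
# legacy "as" forms of {% with %} are equivalent. Calling this helper assumes
# configured Django settings.
def _demo_with_tag():
    from django.template import Context, Template

    modern = Template("{% with total=items|length %}{{ total }}{% endwith %}")
    legacy = Template("{% with items|length as total %}{{ total }}{% endwith %}")
    context = Context({"items": [1, 2, 3]})
    assert modern.render(context) == legacy.render(context) == "3"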
|
ba503b3d8520381c295ba4236c4986dcaeac797501a211c5a288a822b0c5aab9 | """Default variable filters."""
import random as random_module
import re
import types
import warnings
from decimal import ROUND_HALF_UP, Context, Decimal, InvalidOperation
from functools import wraps
from inspect import unwrap
from operator import itemgetter
from pprint import pformat
from urllib.parse import quote
from django.utils import formats
from django.utils.dateformat import format, time_format
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.encoding import iri_to_uri
from django.utils.html import avoid_wrapping, conditional_escape, escape, escapejs
from django.utils.html import json_script as _json_script
from django.utils.html import linebreaks, strip_tags
from django.utils.html import urlize as _urlize
from django.utils.safestring import SafeData, mark_safe
from django.utils.text import Truncator, normalize_newlines, phone2numeric
from django.utils.text import slugify as _slugify
from django.utils.text import wrap
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import gettext, ngettext
from .base import VARIABLE_ATTRIBUTE_SEPARATOR
from .library import Library
register = Library()
#######################
# STRING DECORATOR #
#######################
def stringfilter(func):
"""
Decorator for filters which should only receive strings. The object
passed as the first positional argument will be converted to a string.
"""
@wraps(func)
def _dec(first, *args, **kwargs):
first = str(first)
result = func(first, *args, **kwargs)
if isinstance(first, SafeData) and getattr(unwrap(func), "is_safe", False):
result = mark_safe(result)
return result
return _dec
###################
# STRINGS #
###################
@register.filter(is_safe=True)
@stringfilter
def addslashes(value):
"""
Add slashes before quotes. Useful for escaping strings in CSV, for
example. Less useful for escaping JavaScript; use the ``escapejs``
filter instead.
"""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("'", "\\'")
@register.filter(is_safe=True)
@stringfilter
def capfirst(value):
"""Capitalize the first character of the value."""
return value and value[0].upper() + value[1:]
@register.filter("escapejs")
@stringfilter
def escapejs_filter(value):
"""Hex encode characters for use in JavaScript strings."""
return escapejs(value)
@register.filter(is_safe=True)
def json_script(value, element_id=None):
"""
Output value JSON-encoded, wrapped in a <script type="application/json">
tag (with an optional id).
"""
return _json_script(value, element_id)
@register.filter(is_safe=True)
def floatformat(text, arg=-1):
"""
Display a float to a specified number of decimal places.
If called without an argument, display the floating point number with one
decimal place -- but only if there's a decimal place to be displayed:
* num1 = 34.23234
* num2 = 34.00000
* num3 = 34.26000
* {{ num1|floatformat }} displays "34.2"
* {{ num2|floatformat }} displays "34"
* {{ num3|floatformat }} displays "34.3"
If arg is positive, always display exactly arg number of decimal places:
* {{ num1|floatformat:3 }} displays "34.232"
* {{ num2|floatformat:3 }} displays "34.000"
* {{ num3|floatformat:3 }} displays "34.260"
If arg is negative, display arg number of decimal places -- but only if
there are places to be displayed:
* {{ num1|floatformat:"-3" }} displays "34.232"
* {{ num2|floatformat:"-3" }} displays "34"
* {{ num3|floatformat:"-3" }} displays "34.260"
If arg has the 'g' suffix, force the result to be grouped by the
THOUSAND_SEPARATOR for the active locale. When the active locale is
en (English):
* {{ 6666.6666|floatformat:"2g" }} displays "6,666.67"
* {{ 10000|floatformat:"g" }} displays "10,000"
If arg has the 'u' suffix, force the result to be unlocalized. When the
active locale is pl (Polish):
* {{ 66666.6666|floatformat:"2" }} displays "66666,67"
* {{ 66666.6666|floatformat:"2u" }} displays "66666.67"
If the input float is infinity or NaN, display the string representation
of that value.
"""
force_grouping = False
use_l10n = True
if isinstance(arg, str):
last_char = arg[-1]
if arg[-2:] in {"gu", "ug"}:
force_grouping = True
use_l10n = False
arg = arg[:-2] or -1
elif last_char == "g":
force_grouping = True
arg = arg[:-1] or -1
elif last_char == "u":
use_l10n = False
arg = arg[:-1] or -1
try:
input_val = str(text)
d = Decimal(input_val)
except InvalidOperation:
try:
d = Decimal(str(float(text)))
except (ValueError, InvalidOperation, TypeError):
return ""
try:
p = int(arg)
except ValueError:
return input_val
try:
m = int(d) - d
except (ValueError, OverflowError, InvalidOperation):
return input_val
if not m and p <= 0:
return mark_safe(
formats.number_format(
"%d" % (int(d)),
0,
use_l10n=use_l10n,
force_grouping=force_grouping,
)
)
exp = Decimal(1).scaleb(-abs(p))
# Set the precision high enough to avoid an exception (#15789).
tupl = d.as_tuple()
units = len(tupl[1])
units += -tupl[2] if m else tupl[2]
prec = abs(p) + units + 1
# Avoid conversion to scientific notation by accessing `sign`, `digits`,
# and `exponent` from Decimal.as_tuple() directly.
rounded_d = d.quantize(exp, ROUND_HALF_UP, Context(prec=prec))
sign, digits, exponent = rounded_d.as_tuple()
digits = [str(digit) for digit in reversed(digits)]
while len(digits) <= abs(exponent):
digits.append("0")
digits.insert(-exponent, ".")
if sign and rounded_d:
digits.append("-")
number = "".join(reversed(digits))
return mark_safe(
formats.number_format(
number,
abs(p),
use_l10n=use_l10n,
force_grouping=force_grouping,
)
)
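# Illustrative sketch (not part of the original module): the suffix handling
# documented above, with "u" used so the results don't depend on the active
# locale. Calling this helper assumes configured Django settings, since the
# output passes through django.utils.formats.
def _demo_floatformat():
    assert floatformat(34.23234, "3u") == "34.232"
    assert floatformat(34.00000, "-3u") == "34"
    assert floatformat(34.26000, "3u") == "34.260"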
@register.filter(is_safe=True)
@stringfilter
def iriencode(value):
"""Escape an IRI value for use in a URL."""
return iri_to_uri(value)
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def linenumbers(value, autoescape=True):
"""Display text with line numbers."""
lines = value.split("\n")
# Find the maximum width of the line count, for use with zero padding
# string format command
width = str(len(str(len(lines))))
if not autoescape or isinstance(value, SafeData):
for i, line in enumerate(lines):
lines[i] = ("%0" + width + "d. %s") % (i + 1, line)
else:
for i, line in enumerate(lines):
lines[i] = ("%0" + width + "d. %s") % (i + 1, escape(line))
return mark_safe("\n".join(lines))
@register.filter(is_safe=True)
@stringfilter
def lower(value):
"""Convert a string into all lowercase."""
return value.lower()
@register.filter(is_safe=False)
@stringfilter
def make_list(value):
"""
Return the value turned into a list.
For an integer, it's a list of digits.
For a string, it's a list of characters.
"""
return list(value)
@register.filter(is_safe=True)
@stringfilter
def slugify(value):
"""
Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't
alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip
leading and trailing whitespace.
"""
return _slugify(value)
@register.filter(is_safe=True)
def stringformat(value, arg):
"""
Format the variable according to the arg, a string formatting specifier.
This specifier uses Python string formatting syntax, with the exception
that the leading "%" is dropped.
See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting
for documentation of Python string formatting.
"""
if isinstance(value, tuple):
value = str(value)
try:
return ("%" + str(arg)) % value
except (ValueError, TypeError):
return ""
@register.filter(is_safe=True)
@stringfilter
def title(value):
"""Convert a string into titlecase."""
t = re.sub("([a-z])'([A-Z])", lambda m: m[0].lower(), value.title())
return re.sub(r"\d([A-Z])", lambda m: m[0].lower(), t)
@register.filter(is_safe=True)
@stringfilter
def truncatechars(value, arg):
"""Truncate a string after `arg` number of characters."""
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
return Truncator(value).chars(length)
@register.filter(is_safe=True)
@stringfilter
def truncatechars_html(value, arg):
"""
Truncate HTML after `arg` number of chars.
Preserve newlines in the HTML.
"""
try:
length = int(arg)
except ValueError: # invalid literal for int()
return value # Fail silently.
return Truncator(value).chars(length, html=True)
@register.filter(is_safe=True)
@stringfilter
def truncatewords(value, arg):
"""
Truncate a string after `arg` number of words.
Remove newlines within the string.
"""
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
return Truncator(value).words(length, truncate=" …")
@register.filter(is_safe=True)
@stringfilter
def truncatewords_html(value, arg):
"""
Truncate HTML after `arg` number of words.
Preserve newlines in the HTML.
"""
try:
length = int(arg)
except ValueError: # invalid literal for int()
return value # Fail silently.
return Truncator(value).words(length, html=True, truncate=" …")
@register.filter(is_safe=False)
@stringfilter
def upper(value):
"""Convert a string into all uppercase."""
return value.upper()
@register.filter(is_safe=False)
@stringfilter
def urlencode(value, safe=None):
"""
Escape a value for use in a URL.
The ``safe`` parameter determines the characters which should not be
escaped by Python's quote() function. If not provided, use the default safe
characters (but an empty string can be provided when *all* characters
should be escaped).
"""
kwargs = {}
if safe is not None:
kwargs["safe"] = safe
return quote(value, **kwargs)
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def urlize(value, autoescape=True):
"""Convert URLs in plain text into clickable links."""
return mark_safe(_urlize(value, nofollow=True, autoescape=autoescape))
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def urlizetrunc(value, limit, autoescape=True):
"""
Convert URLs into clickable links, truncating URLs to the given character
limit, and adding 'rel=nofollow' attribute to discourage spamming.
Argument: Length to truncate URLs to.
"""
return mark_safe(
_urlize(value, trim_url_limit=int(limit), nofollow=True, autoescape=autoescape)
)
@register.filter(is_safe=False)
@stringfilter
def wordcount(value):
"""Return the number of words."""
return len(value.split())
@register.filter(is_safe=True)
@stringfilter
def wordwrap(value, arg):
"""Wrap words at `arg` line length."""
return wrap(value, int(arg))
@register.filter(is_safe=True)
@stringfilter
def ljust(value, arg):
"""Left-align the value in a field of a given width."""
return value.ljust(int(arg))
@register.filter(is_safe=True)
@stringfilter
def rjust(value, arg):
"""Right-align the value in a field of a given width."""
return value.rjust(int(arg))
@register.filter(is_safe=True)
@stringfilter
def center(value, arg):
"""Center the value in a field of a given width."""
return value.center(int(arg))
@register.filter
@stringfilter
def cut(value, arg):
"""Remove all values of arg from the given string."""
safe = isinstance(value, SafeData)
value = value.replace(arg, "")
if safe and arg != ";":
return mark_safe(value)
return value
###################
# HTML STRINGS #
###################
@register.filter("escape", is_safe=True)
@stringfilter
def escape_filter(value):
"""Mark the value as a string that should be auto-escaped."""
return conditional_escape(value)
@register.filter(is_safe=True)
@stringfilter
def force_escape(value):
"""
Escape a string's HTML. Return a new string containing the escaped
characters (as opposed to "escape", which marks the content for later
possible escaping).
"""
return escape(value)
@register.filter("linebreaks", is_safe=True, needs_autoescape=True)
@stringfilter
def linebreaks_filter(value, autoescape=True):
"""
Replace line breaks in plain text with appropriate HTML; a single
newline becomes an HTML line break (``<br>``) and a new line
followed by a blank line becomes a paragraph break (``</p>``).
"""
autoescape = autoescape and not isinstance(value, SafeData)
return mark_safe(linebreaks(value, autoescape))
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def linebreaksbr(value, autoescape=True):
"""
Convert all newlines in a piece of plain text to HTML line breaks
(``<br>``).
"""
autoescape = autoescape and not isinstance(value, SafeData)
value = normalize_newlines(value)
if autoescape:
value = escape(value)
return mark_safe(value.replace("\n", "<br>"))
@register.filter(is_safe=True)
@stringfilter
def safe(value):
"""Mark the value as a string that should not be auto-escaped."""
return mark_safe(value)
@register.filter(is_safe=True)
def safeseq(value):
"""
A "safe" filter for sequences. Mark each element in the sequence,
individually, as safe, after converting them to strings. Return a list
with the results.
"""
return [mark_safe(obj) for obj in value]
@register.filter(is_safe=True)
@stringfilter
def striptags(value):
"""Strip all [X]HTML tags."""
return strip_tags(value)
###################
# LISTS #
###################
def _property_resolver(arg):
"""
    When arg is convertible to float, behave like operator.itemgetter(arg).
Otherwise, chain __getitem__() and getattr().
>>> _property_resolver(1)('abc')
'b'
>>> _property_resolver('1')('abc')
Traceback (most recent call last):
...
TypeError: string indices must be integers
>>> class Foo:
... a = 42
... b = 3.14
... c = 'Hey!'
>>> _property_resolver('b')(Foo())
3.14
"""
try:
float(arg)
except ValueError:
if VARIABLE_ATTRIBUTE_SEPARATOR + "_" in arg or arg[0] == "_":
raise AttributeError("Access to private variables is forbidden.")
parts = arg.split(VARIABLE_ATTRIBUTE_SEPARATOR)
def resolve(value):
for part in parts:
try:
value = value[part]
except (AttributeError, IndexError, KeyError, TypeError, ValueError):
value = getattr(value, part)
return value
return resolve
else:
return itemgetter(arg)
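# Illustrative sketch (not part of the original module): the two resolution
# modes described above, plus the guard against private names.
def _demo_property_resolver():
    # A numeric-looking arg behaves like operator.itemgetter().
    assert _property_resolver(0)(["a", "b"]) == "a"
    # A dotted arg chains __getitem__()/getattr() lookups.
    assert _property_resolver("user.name")({"user": {"name": "Ada"}}) == "Ada"
    try:
        _property_resolver("_secret")
    except AttributeError:
        pass  # Names starting with "_" are rejected.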
@register.filter(is_safe=False)
def dictsort(value, arg):
"""
Given a list of dicts, return that list sorted by the property given in
the argument.
"""
try:
return sorted(value, key=_property_resolver(arg))
except (AttributeError, TypeError):
return ""
@register.filter(is_safe=False)
def dictsortreversed(value, arg):
"""
Given a list of dicts, return that list sorted in reverse order by the
property given in the argument.
"""
try:
return sorted(value, key=_property_resolver(arg), reverse=True)
except (AttributeError, TypeError):
return ""
@register.filter(is_safe=False)
def first(value):
"""Return the first item in a list."""
try:
return value[0]
except IndexError:
return ""
@register.filter(is_safe=True, needs_autoescape=True)
def join(value, arg, autoescape=True):
"""Join a list with a string, like Python's ``str.join(list)``."""
try:
if autoescape:
value = [conditional_escape(v) for v in value]
data = conditional_escape(arg).join(value)
except TypeError: # Fail silently if arg isn't iterable.
return value
return mark_safe(data)
@register.filter(is_safe=True)
def last(value):
"""Return the last item in a list."""
try:
return value[-1]
except IndexError:
return ""
@register.filter(is_safe=False)
def length(value):
"""Return the length of the value - useful for lists."""
try:
return len(value)
except (ValueError, TypeError):
return 0
@register.filter(is_safe=False)
def length_is(value, arg):
"""Return a boolean of whether the value's length is the argument."""
warnings.warn(
"The length_is template filter is deprecated in favor of the length template "
"filter and the == operator within an {% if %} tag.",
RemovedInDjango51Warning,
)
try:
return len(value) == int(arg)
except (ValueError, TypeError):
return ""
@register.filter(is_safe=True)
def random(value):
"""Return a random item from the list."""
return random_module.choice(value)
@register.filter("slice", is_safe=True)
def slice_filter(value, arg):
"""
Return a slice of the list using the same syntax as Python's list slicing.
"""
try:
bits = []
for x in str(arg).split(":"):
if not x:
bits.append(None)
else:
bits.append(int(x))
return value[slice(*bits)]
except (ValueError, TypeError):
return value # Fail silently.
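# Illustrative sketch (not part of the original module): the arg string maps
# directly onto Python slice components, and bad input fails silently.
def _demo_slice_filter():
    assert slice_filter([1, 2, 3, 4], ":2") == [1, 2]
    assert slice_filter([1, 2, 3, 4], "1:3") == [2, 3]
    assert slice_filter([1, 2, 3, 4], "::2") == [1, 3]
    assert slice_filter([1, 2, 3, 4], "bad") == [1, 2, 3, 4]  # Fails silently.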
@register.filter(is_safe=True, needs_autoescape=True)
def unordered_list(value, autoescape=True):
"""
Recursively take a self-nested list and return an HTML unordered list --
WITHOUT opening and closing <ul> tags.
Assume the list is in the proper format. For example, if ``var`` contains:
``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``, then
``{{ var|unordered_list }}`` returns::
<li>States
<ul>
<li>Kansas
<ul>
<li>Lawrence</li>
<li>Topeka</li>
</ul>
</li>
<li>Illinois</li>
</ul>
</li>
"""
if autoescape:
escaper = conditional_escape
else:
def escaper(x):
return x
def walk_items(item_list):
item_iterator = iter(item_list)
try:
item = next(item_iterator)
while True:
try:
next_item = next(item_iterator)
except StopIteration:
yield item, None
break
if isinstance(next_item, (list, tuple, types.GeneratorType)):
try:
iter(next_item)
except TypeError:
pass
else:
yield item, next_item
item = next(item_iterator)
continue
yield item, None
item = next_item
except StopIteration:
pass
def list_formatter(item_list, tabs=1):
indent = "\t" * tabs
output = []
for item, children in walk_items(item_list):
sublist = ""
if children:
sublist = "\n%s<ul>\n%s\n%s</ul>\n%s" % (
indent,
list_formatter(children, tabs + 1),
indent,
indent,
)
output.append("%s<li>%s%s</li>" % (indent, escaper(item), sublist))
return "\n".join(output)
return mark_safe(list_formatter(value))
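# Illustrative sketch (not part of the original module): a one-level nested
# list becomes an <li> with a nested, tab-indented <ul>, and no outer <ul>.
def _demo_unordered_list():
    html = unordered_list(["States", ["Kansas"]])
    assert html == "\t<li>States\n\t<ul>\n\t\t<li>Kansas</li>\n\t</ul>\n\t</li>"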
###################
# INTEGERS #
###################
@register.filter(is_safe=False)
def add(value, arg):
"""Add the arg to the value."""
try:
return int(value) + int(arg)
except (ValueError, TypeError):
try:
return value + arg
except Exception:
return ""
@register.filter(is_safe=False)
def get_digit(value, arg):
"""
Given a whole number, return the requested digit of it, where 1 is the
right-most digit, 2 is the second-right-most digit, etc. Return the
original value for invalid input (if input or argument is not an integer,
or if argument is less than 1). Otherwise, output is always an integer.
"""
try:
arg = int(arg)
value = int(value)
except ValueError:
return value # Fail silently for an invalid argument
if arg < 1:
return value
try:
return int(str(value)[-arg])
except IndexError:
return 0
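# Illustrative sketch (not part of the original module): digits are counted
# from the right; out-of-range positions give 0 and bad input fails silently.
def _demo_get_digit():
    assert get_digit(123, 1) == 3
    assert get_digit(123, 2) == 2
    assert get_digit(123, 5) == 0        # Past the leftmost digit.
    assert get_digit("abc", 1) == "abc"  # Non-integer input is returned as-is.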
###################
# DATES #
###################
@register.filter(expects_localtime=True, is_safe=False)
def date(value, arg=None):
"""Format a date according to the given format."""
if value in (None, ""):
return ""
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ""
@register.filter(expects_localtime=True, is_safe=False)
def time(value, arg=None):
"""Format a time according to the given format."""
if value in (None, ""):
return ""
try:
return formats.time_format(value, arg)
except (AttributeError, TypeError):
try:
return time_format(value, arg)
except (AttributeError, TypeError):
return ""
@register.filter("timesince", is_safe=False)
def timesince_filter(value, arg=None):
"""Format a date as the time since that date (i.e. "4 days, 6 hours")."""
if not value:
return ""
try:
if arg:
return timesince(value, arg)
return timesince(value)
except (ValueError, TypeError):
return ""
@register.filter("timeuntil", is_safe=False)
def timeuntil_filter(value, arg=None):
"""Format a date as the time until that date (i.e. "4 days, 6 hours")."""
if not value:
return ""
try:
return timeuntil(value, arg)
except (ValueError, TypeError):
return ""
###################
# LOGIC #
###################
@register.filter(is_safe=False)
def default(value, arg):
"""If value is unavailable, use given default."""
return value or arg
@register.filter(is_safe=False)
def default_if_none(value, arg):
"""If value is None, use given default."""
if value is None:
return arg
return value
@register.filter(is_safe=False)
def divisibleby(value, arg):
"""Return True if the value is divisible by the argument."""
return int(value) % int(arg) == 0
@register.filter(is_safe=False)
def yesno(value, arg=None):
"""
Given a string mapping values for true, false, and (optionally) None,
return one of those strings according to the value:
========== ====================== ==================================
Value Argument Outputs
========== ====================== ==================================
``True`` ``"yeah,no,maybe"`` ``yeah``
``False`` ``"yeah,no,maybe"`` ``no``
``None`` ``"yeah,no,maybe"`` ``maybe``
``None`` ``"yeah,no"`` ``"no"`` (converts None to False
                                      if no mapping for None is given)
========== ====================== ==================================
"""
if arg is None:
# Translators: Please do not add spaces around commas.
arg = gettext("yes,no,maybe")
bits = arg.split(",")
if len(bits) < 2:
return value # Invalid arg.
try:
yes, no, maybe = bits
except ValueError:
# Unpack list of wrong size (no "maybe" value provided).
yes, no, maybe = bits[0], bits[1], bits[1]
if value is None:
return maybe
if value:
return yes
return no
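# Illustrative sketch (not part of the original module): the three-value and
# two-value mappings, with None folded into the false case when unmapped.
def _demo_yesno():
    assert yesno(True, "yeah,no,maybe") == "yeah"
    assert yesno(None, "yeah,no,maybe") == "maybe"
    assert yesno(None, "yeah,no") == "no"
    assert yesno(True, "only") is True  # Fewer than two items: invalid arg.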
###################
# MISC #
###################
@register.filter(is_safe=True)
def filesizeformat(bytes_):
"""
Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
102 bytes, etc.).
"""
try:
bytes_ = int(bytes_)
except (TypeError, ValueError, UnicodeDecodeError):
value = ngettext("%(size)d byte", "%(size)d bytes", 0) % {"size": 0}
return avoid_wrapping(value)
def filesize_number_format(value):
return formats.number_format(round(value, 1), 1)
KB = 1 << 10
MB = 1 << 20
GB = 1 << 30
TB = 1 << 40
PB = 1 << 50
negative = bytes_ < 0
if negative:
bytes_ = -bytes_ # Allow formatting of negative numbers.
if bytes_ < KB:
value = ngettext("%(size)d byte", "%(size)d bytes", bytes_) % {"size": bytes_}
elif bytes_ < MB:
value = gettext("%s KB") % filesize_number_format(bytes_ / KB)
elif bytes_ < GB:
value = gettext("%s MB") % filesize_number_format(bytes_ / MB)
elif bytes_ < TB:
value = gettext("%s GB") % filesize_number_format(bytes_ / GB)
elif bytes_ < PB:
value = gettext("%s TB") % filesize_number_format(bytes_ / TB)
else:
value = gettext("%s PB") % filesize_number_format(bytes_ / PB)
if negative:
value = "-%s" % value
return avoid_wrapping(value)
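# Illustrative sketch (not part of the original module): thresholds are
# binary (1 KB == 1024 bytes) and negative sizes are formatted from their
# absolute value. Calling this helper assumes configured Django settings,
# since the output is localized.
def _demo_filesizeformat():
    assert filesizeformat(1023).endswith("bytes")
    assert filesizeformat(1 << 20).endswith("MB")
    assert filesizeformat(-1024).startswith("-")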
@register.filter(is_safe=False)
def pluralize(value, arg="s"):
"""
Return a plural suffix if the value is not 1, '1', or an object of
length 1. By default, use 's' as the suffix:
    * If value is 0, vote{{ value|pluralize }} displays "votes".
    * If value is 1, vote{{ value|pluralize }} displays "vote".
    * If value is 2, vote{{ value|pluralize }} displays "votes".
If an argument is provided, use that string instead:
    * If value is 0, class{{ value|pluralize:"es" }} displays "classes".
    * If value is 1, class{{ value|pluralize:"es" }} displays "class".
    * If value is 2, class{{ value|pluralize:"es" }} displays "classes".
If the provided argument contains a comma, use the text before the comma
for the singular case and the text after the comma for the plural case:
    * If value is 0, cand{{ value|pluralize:"y,ies" }} displays "candies".
    * If value is 1, cand{{ value|pluralize:"y,ies" }} displays "candy".
    * If value is 2, cand{{ value|pluralize:"y,ies" }} displays "candies".
"""
if "," not in arg:
arg = "," + arg
bits = arg.split(",")
if len(bits) > 2:
return ""
singular_suffix, plural_suffix = bits[:2]
try:
return singular_suffix if float(value) == 1 else plural_suffix
except ValueError: # Invalid string that's not a number.
pass
except TypeError: # Value isn't a string or a number; maybe it's a list?
try:
return singular_suffix if len(value) == 1 else plural_suffix
except TypeError: # len() of unsized object.
pass
return ""
@register.filter("phone2numeric", is_safe=True)
def phone2numeric_filter(value):
"""Take a phone number and converts it in to its numerical equivalent."""
return phone2numeric(value)
@register.filter(is_safe=True)
def pprint(value):
"""A wrapper around pprint.pprint -- for debugging, really."""
try:
return pformat(value)
except Exception as e:
return "Error in formatting: %s: %s" % (e.__class__.__name__, e)
|
1ad9fa00692688e1490960a4a68373c4a38670e14504ee7236d7ce88b92f5d1c | """
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
import logging
import string
from collections import defaultdict
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import DisallowedHost, ImproperlyConfigured
from django.http import HttpHeaders, UnreadablePostError
from django.urls import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.crypto import constant_time_compare, get_random_string
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import cached_property
from django.utils.http import is_same_domain
from django.utils.log import log_response
from django.utils.regex_helper import _lazy_re_compile
logger = logging.getLogger("django.security.csrf")
# This matches if any character is not in CSRF_ALLOWED_CHARS.
invalid_token_chars_re = _lazy_re_compile("[^a-zA-Z0-9]")
REASON_BAD_ORIGIN = "Origin checking failed - %s does not match any trusted origins."
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_CSRF_TOKEN_MISSING = "CSRF token missing."
REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed."
REASON_INSECURE_REFERER = (
"Referer checking failed - Referer is insecure while host is secure."
)
# The reason strings below are for passing to InvalidTokenFormat. They are
# phrases without a subject because they can be in reference to either the CSRF
# cookie or non-cookie token.
REASON_INCORRECT_LENGTH = "has incorrect length"
REASON_INVALID_CHARACTERS = "has invalid characters"
CSRF_SECRET_LENGTH = 32
CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH
CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits
CSRF_SESSION_KEY = "_csrftoken"
def _get_failure_view():
"""Return the view to be used for CSRF rejections."""
return get_callable(settings.CSRF_FAILURE_VIEW)
def _get_new_csrf_string():
return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS)
def _mask_cipher_secret(secret):
"""
Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a
token by adding a mask and applying it to the secret.
"""
mask = _get_new_csrf_string()
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in mask))
cipher = "".join(chars[(x + y) % len(chars)] for x, y in pairs)
return mask + cipher
def _unmask_cipher_token(token):
"""
Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length
CSRF_TOKEN_LENGTH, and that its first half is a mask), use it to decrypt
the second half to produce the original secret.
"""
mask = token[:CSRF_SECRET_LENGTH]
token = token[CSRF_SECRET_LENGTH:]
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in mask))
return "".join(chars[x - y] for x, y in pairs) # Note negative values are ok
def _add_new_csrf_cookie(request):
"""Generate a new random CSRF_COOKIE value, and add it to request.META."""
csrf_secret = _get_new_csrf_string()
request.META.update(
{
"CSRF_COOKIE": csrf_secret,
"CSRF_COOKIE_NEEDS_UPDATE": True,
}
)
return csrf_secret
def get_token(request):
"""
Return the CSRF token required for a POST form. The token is an
alphanumeric value. A new token is created if one is not already set.
A side effect of calling this function is to make the csrf_protect
decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie'
header to the outgoing response. For this reason, you may need to use this
function lazily, as is done by the csrf context processor.
"""
if "CSRF_COOKIE" in request.META:
csrf_secret = request.META["CSRF_COOKIE"]
# Since the cookie is being used, flag to send the cookie in
# process_response() (even if the client already has it) in order to
# renew the expiry timer.
request.META["CSRF_COOKIE_NEEDS_UPDATE"] = True
else:
csrf_secret = _add_new_csrf_cookie(request)
return _mask_cipher_secret(csrf_secret)
def rotate_token(request):
"""
Change the CSRF token in use for a request - should be done on login
for security purposes.
"""
_add_new_csrf_cookie(request)
class InvalidTokenFormat(Exception):
def __init__(self, reason):
self.reason = reason
def _check_token_format(token):
"""
Raise an InvalidTokenFormat error if the token has an invalid length or
characters that aren't allowed. The token argument can be a CSRF cookie
secret or non-cookie CSRF token, and either masked or unmasked.
"""
if len(token) not in (CSRF_TOKEN_LENGTH, CSRF_SECRET_LENGTH):
raise InvalidTokenFormat(REASON_INCORRECT_LENGTH)
# Make sure all characters are in CSRF_ALLOWED_CHARS.
if invalid_token_chars_re.search(token):
raise InvalidTokenFormat(REASON_INVALID_CHARACTERS)
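# Illustrative sketch (not part of the original module): both accepted
# lengths pass, while anything else raises InvalidTokenFormat with one of the
# reason phrases defined above.
def _demo_check_token_format():
    _check_token_format("a" * CSRF_SECRET_LENGTH)  # Unmasked secret: OK.
    _check_token_format("a" * CSRF_TOKEN_LENGTH)   # Masked token: OK.
    try:
        _check_token_format("too-short!")
    except InvalidTokenFormat as exc:
        assert exc.reason == REASON_INCORRECT_LENGTH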
def _does_token_match(request_csrf_token, csrf_secret):
"""
Return whether the given CSRF token matches the given CSRF secret, after
unmasking the token if necessary.
This function assumes that the request_csrf_token argument has been
validated to have the correct length (CSRF_SECRET_LENGTH or
CSRF_TOKEN_LENGTH characters) and allowed characters, and that if it has
length CSRF_TOKEN_LENGTH, it is a masked secret.
"""
# Only unmask tokens that are exactly CSRF_TOKEN_LENGTH characters long.
if len(request_csrf_token) == CSRF_TOKEN_LENGTH:
request_csrf_token = _unmask_cipher_token(request_csrf_token)
assert len(request_csrf_token) == CSRF_SECRET_LENGTH
return constant_time_compare(request_csrf_token, csrf_secret)
class RejectRequest(Exception):
def __init__(self, reason):
self.reason = reason
class CsrfViewMiddleware(MiddlewareMixin):
"""
Require a present and correct csrfmiddlewaretoken for POST requests that
have a CSRF cookie, and set an outgoing CSRF cookie.
This middleware should be used in conjunction with the {% csrf_token %}
template tag.
"""
@cached_property
def csrf_trusted_origins_hosts(self):
return [
urlparse(origin).netloc.lstrip("*")
for origin in settings.CSRF_TRUSTED_ORIGINS
]
@cached_property
def allowed_origins_exact(self):
return {origin for origin in settings.CSRF_TRUSTED_ORIGINS if "*" not in origin}
@cached_property
def allowed_origin_subdomains(self):
"""
A mapping of allowed schemes to list of allowed netlocs, where all
subdomains of the netloc are allowed.
"""
allowed_origin_subdomains = defaultdict(list)
for parsed in (
urlparse(origin)
for origin in settings.CSRF_TRUSTED_ORIGINS
if "*" in origin
):
allowed_origin_subdomains[parsed.scheme].append(parsed.netloc.lstrip("*"))
return allowed_origin_subdomains
# The _accept and _reject methods currently only exist for the sake of the
# requires_csrf_token decorator.
def _accept(self, request):
# Avoid checking the request twice by adding a custom attribute to
# request. This will be relevant when both decorator and middleware
# are used.
request.csrf_processing_done = True
return None
def _reject(self, request, reason):
response = _get_failure_view()(request, reason=reason)
log_response(
"Forbidden (%s): %s",
reason,
request.path,
response=response,
request=request,
logger=logger,
)
return response
def _get_secret(self, request):
"""
Return the CSRF secret originally associated with the request, or None
if it didn't have one.
If the CSRF_USE_SESSIONS setting is false, raises InvalidTokenFormat if
the request's secret has invalid characters or an invalid length.
"""
if settings.CSRF_USE_SESSIONS:
try:
csrf_secret = request.session.get(CSRF_SESSION_KEY)
except AttributeError:
raise ImproperlyConfigured(
"CSRF_USE_SESSIONS is enabled, but request.session is not "
"set. SessionMiddleware must appear before CsrfViewMiddleware "
"in MIDDLEWARE."
)
else:
try:
csrf_secret = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
csrf_secret = None
else:
# This can raise InvalidTokenFormat.
_check_token_format(csrf_secret)
if csrf_secret is None:
return None
# Django versions before 4.0 masked the secret before storing.
if len(csrf_secret) == CSRF_TOKEN_LENGTH:
csrf_secret = _unmask_cipher_token(csrf_secret)
return csrf_secret
def _set_csrf_cookie(self, request, response):
if settings.CSRF_USE_SESSIONS:
if request.session.get(CSRF_SESSION_KEY) != request.META["CSRF_COOKIE"]:
request.session[CSRF_SESSION_KEY] = request.META["CSRF_COOKIE"]
else:
response.set_cookie(
settings.CSRF_COOKIE_NAME,
request.META["CSRF_COOKIE"],
max_age=settings.CSRF_COOKIE_AGE,
domain=settings.CSRF_COOKIE_DOMAIN,
path=settings.CSRF_COOKIE_PATH,
secure=settings.CSRF_COOKIE_SECURE,
httponly=settings.CSRF_COOKIE_HTTPONLY,
samesite=settings.CSRF_COOKIE_SAMESITE,
)
# Set the Vary header since content varies with the CSRF cookie.
patch_vary_headers(response, ("Cookie",))
def _origin_verified(self, request):
request_origin = request.META["HTTP_ORIGIN"]
try:
good_host = request.get_host()
except DisallowedHost:
pass
else:
good_origin = "%s://%s" % (
"https" if request.is_secure() else "http",
good_host,
)
if request_origin == good_origin:
return True
if request_origin in self.allowed_origins_exact:
return True
try:
parsed_origin = urlparse(request_origin)
except ValueError:
return False
request_scheme = parsed_origin.scheme
request_netloc = parsed_origin.netloc
return any(
is_same_domain(request_netloc, host)
for host in self.allowed_origin_subdomains.get(request_scheme, ())
)
def _check_referer(self, request):
referer = request.META.get("HTTP_REFERER")
if referer is None:
raise RejectRequest(REASON_NO_REFERER)
try:
referer = urlparse(referer)
except ValueError:
raise RejectRequest(REASON_MALFORMED_REFERER)
# Make sure we have a valid URL for Referer.
if "" in (referer.scheme, referer.netloc):
raise RejectRequest(REASON_MALFORMED_REFERER)
# Ensure that our Referer is also secure.
if referer.scheme != "https":
raise RejectRequest(REASON_INSECURE_REFERER)
if any(
is_same_domain(referer.netloc, host)
for host in self.csrf_trusted_origins_hosts
):
return
# Allow matching the configured cookie domain.
good_referer = (
settings.SESSION_COOKIE_DOMAIN
if settings.CSRF_USE_SESSIONS
else settings.CSRF_COOKIE_DOMAIN
)
if good_referer is None:
# If no cookie domain is configured, allow matching the current
# host:port exactly if it's permitted by ALLOWED_HOSTS.
try:
# request.get_host() includes the port.
good_referer = request.get_host()
except DisallowedHost:
raise RejectRequest(REASON_BAD_REFERER % referer.geturl())
else:
server_port = request.get_port()
if server_port not in ("443", "80"):
good_referer = "%s:%s" % (good_referer, server_port)
if not is_same_domain(referer.netloc, good_referer):
raise RejectRequest(REASON_BAD_REFERER % referer.geturl())
def _bad_token_message(self, reason, token_source):
if token_source != "POST":
# Assume it is a settings.CSRF_HEADER_NAME value.
header_name = HttpHeaders.parse_header_name(token_source)
token_source = f"the {header_name!r} HTTP header"
return f"CSRF token from {token_source} {reason}."
def _check_token(self, request):
# Access csrf_secret via self._get_secret() as rotate_token() may have
# been called by an authentication middleware during the
# process_request() phase.
try:
csrf_secret = self._get_secret(request)
except InvalidTokenFormat as exc:
raise RejectRequest(f"CSRF cookie {exc.reason}.")
if csrf_secret is None:
# No CSRF cookie. For POST requests, we insist on a CSRF cookie,
# and in this way we can avoid all CSRF attacks, including login
# CSRF.
raise RejectRequest(REASON_NO_CSRF_COOKIE)
# Check non-cookie token for match.
request_csrf_token = ""
if request.method == "POST":
try:
request_csrf_token = request.POST.get("csrfmiddlewaretoken", "")
except UnreadablePostError:
# Handle a broken connection before we've completed reading the
# POST data. process_view shouldn't raise any exceptions, so
# we'll ignore and serve the user a 403 (assuming they're still
# listening, which they probably aren't because of the error).
pass
if request_csrf_token == "":
# Fall back to X-CSRFToken, to make things easier for AJAX, and
# possible for PUT/DELETE.
try:
# This can have length CSRF_SECRET_LENGTH or CSRF_TOKEN_LENGTH,
# depending on whether the client obtained the token from
# the DOM or the cookie (and if the cookie, whether the cookie
# was masked or unmasked).
request_csrf_token = request.META[settings.CSRF_HEADER_NAME]
except KeyError:
raise RejectRequest(REASON_CSRF_TOKEN_MISSING)
token_source = settings.CSRF_HEADER_NAME
else:
token_source = "POST"
try:
_check_token_format(request_csrf_token)
except InvalidTokenFormat as exc:
reason = self._bad_token_message(exc.reason, token_source)
raise RejectRequest(reason)
if not _does_token_match(request_csrf_token, csrf_secret):
reason = self._bad_token_message("incorrect", token_source)
raise RejectRequest(reason)
def process_request(self, request):
try:
csrf_secret = self._get_secret(request)
except InvalidTokenFormat:
_add_new_csrf_cookie(request)
else:
if csrf_secret is not None:
# Use the same secret next time. If the secret was originally
# masked, this also causes it to be replaced with the unmasked
# form, but only in cases where the secret is already getting
# saved anyways.
request.META["CSRF_COOKIE"] = csrf_secret
def process_view(self, request, callback, callback_args, callback_kwargs):
if getattr(request, "csrf_processing_done", False):
return None
# Wait until request.META["CSRF_COOKIE"] has been manipulated before
# bailing out, so that get_token still works
if getattr(callback, "csrf_exempt", False):
return None
# Assume that anything not defined as 'safe' by RFC 9110 needs protection
if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
return self._accept(request)
if getattr(request, "_dont_enforce_csrf_checks", False):
# Mechanism to turn off CSRF checks for test suite. It comes after
# the creation of CSRF cookies, so that everything else continues
# to work exactly the same (e.g. cookies are sent, etc.), but
# before any branches that call the _reject method.
return self._accept(request)
# Reject the request if the Origin header doesn't match an allowed
# value.
if "HTTP_ORIGIN" in request.META:
if not self._origin_verified(request):
return self._reject(
request, REASON_BAD_ORIGIN % request.META["HTTP_ORIGIN"]
)
elif request.is_secure():
# If the Origin header wasn't provided, reject HTTPS requests if
# the Referer header doesn't match an allowed value.
#
# Suppose user visits http://example.com/
# An active network attacker (man-in-the-middle, MITM) sends a
# POST form that targets https://example.com/detonate-bomb/ and
# submits it via JavaScript.
#
# The attacker will need to provide a CSRF cookie and token, but
# that's no problem for a MITM and the session-independent secret
# we're using. So the MITM can circumvent the CSRF protection. This
# is true for any HTTP connection, but anyone using HTTPS expects
# better! For this reason, for https://example.com/ we need
# additional protection that treats http://example.com/ as
# completely untrusted. Under HTTPS, Barth et al. found that the
# Referer header is missing for same-domain requests in only about
# 0.2% of cases or less, so we can use strict Referer checking.
try:
self._check_referer(request)
except RejectRequest as exc:
return self._reject(request, exc.reason)
try:
self._check_token(request)
except RejectRequest as exc:
return self._reject(request, exc.reason)
return self._accept(request)
def process_response(self, request, response):
if request.META.get("CSRF_COOKIE_NEEDS_UPDATE"):
self._set_csrf_cookie(request, response)
# Unset the flag to prevent _set_csrf_cookie() from being
# unnecessarily called again in process_response() by other
# instances of CsrfViewMiddleware. This can happen e.g. when both a
# decorator and middleware are used. However,
# CSRF_COOKIE_NEEDS_UPDATE is still respected in subsequent calls
# e.g. in case rotate_token() is called in process_response() later
# by custom middleware but before those subsequent calls.
request.META["CSRF_COOKIE_NEEDS_UPDATE"] = False
return response
|
b3625954e773642d7a960a1bb08f7787281c747e4bd0a8f8ef8d961245ec55cb | import functools
import uuid
class IntConverter:
regex = "[0-9]+"
def to_python(self, value):
return int(value)
def to_url(self, value):
return str(value)
class StringConverter:
regex = "[^/]+"
def to_python(self, value):
return value
def to_url(self, value):
return value
class UUIDConverter:
regex = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
def to_python(self, value):
return uuid.UUID(value)
def to_url(self, value):
return str(value)
class SlugConverter(StringConverter):
regex = "[-a-zA-Z0-9_]+"
class PathConverter(StringConverter):
regex = ".+"
DEFAULT_CONVERTERS = {
"int": IntConverter(),
"path": PathConverter(),
"slug": SlugConverter(),
"str": StringConverter(),
"uuid": UUIDConverter(),
}
REGISTERED_CONVERTERS = {}
def register_converter(converter, type_name):
REGISTERED_CONVERTERS[type_name] = converter()
get_converters.cache_clear()
@functools.cache
def get_converters():
return {**DEFAULT_CONVERTERS, **REGISTERED_CONVERTERS}
def get_converter(raw_converter):
return get_converters()[raw_converter]
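# Illustrative sketch (not part of the original module): registering a
# hypothetical four-digit-year converter. register_converter() stores an
# *instance* and clears the get_converters() cache so lookups see it at once.
class FourDigitYearConverter:
    regex = "[0-9]{4}"

    def to_python(self, value):
        return int(value)

    def to_url(self, value):
        return "%04d" % value

# Usage (the "yyyy" name is an example, not a Django default):
#   register_converter(FourDigitYearConverter, "yyyy")
#   get_converter("yyyy").to_python("1999")  # -> 1999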
|
77529273a25547ee59f1ac60ff20e060ab64a8e6dd6c9c1636485c07cc78e11b | import functools
from importlib import import_module
from django.core.exceptions import ViewDoesNotExist
from django.utils.module_loading import module_has_submodule
@functools.cache
def get_callable(lookup_view):
"""
Return a callable corresponding to lookup_view.
* If lookup_view is already a callable, return it.
* If lookup_view is a string import path that can be resolved to a callable,
import that callable and return it, otherwise raise an exception
(ImportError or ViewDoesNotExist).
"""
if callable(lookup_view):
return lookup_view
if not isinstance(lookup_view, str):
raise ViewDoesNotExist(
"'%s' is not a callable or a dot-notation path" % lookup_view
)
mod_name, func_name = get_mod_func(lookup_view)
if not func_name: # No '.' in lookup_view
raise ImportError(
"Could not import '%s'. The path must be fully qualified." % lookup_view
)
try:
mod = import_module(mod_name)
except ImportError:
parentmod, submod = get_mod_func(mod_name)
if submod and not module_has_submodule(import_module(parentmod), submod):
raise ViewDoesNotExist(
"Could not import '%s'. Parent module %s does not exist."
% (lookup_view, mod_name)
)
else:
raise
else:
try:
view_func = getattr(mod, func_name)
except AttributeError:
raise ViewDoesNotExist(
"Could not import '%s'. View does not exist in module %s."
% (lookup_view, mod_name)
)
else:
if not callable(view_func):
raise ViewDoesNotExist(
"Could not import '%s.%s'. View is not callable."
% (mod_name, func_name)
)
return view_func
def get_mod_func(callback):
# Convert 'django.views.news.stories.story_detail' to
# ['django.views.news.stories', 'story_detail']
try:
dot = callback.rindex(".")
except ValueError:
return callback, ""
return callback[:dot], callback[dot + 1 :]
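# Behavior sketch (assuming the named modules are importable):
#
#     get_mod_func("django.views.news.stories.story_detail")
#     # -> ("django.views.news.stories", "story_detail")
#     get_mod_func("story_detail")  # no dot in the path
#     # -> ("story_detail", "")
#     get_callable("django.contrib.auth.views.LoginView")
#     # imports the module and returns the LoginView class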
|
e398c70cc779094a2a4e09d07bca2c1a716ad496c89497767c37ea5e2c02335e | """
This module converts requested URLs to callback view functions.
URLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a ResolverMatch object which provides access to all
attributes of the resolved URL match.
"""
import functools
import inspect
import re
import string
from importlib import import_module
from pickle import PicklingError
from urllib.parse import quote
from asgiref.local import Local
from django.conf import settings
from django.core.checks import Error, Warning
from django.core.checks.urls import check_resolver
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.utils.datastructures import MultiValueDict
from django.utils.functional import cached_property
from django.utils.http import RFC3986_SUBDELIMS, escape_leading_slashes
from django.utils.regex_helper import _lazy_re_compile, normalize
from django.utils.translation import get_language
from .converters import get_converter
from .exceptions import NoReverseMatch, Resolver404
from .utils import get_callable
class ResolverMatch:
def __init__(
self,
func,
args,
kwargs,
url_name=None,
app_names=None,
namespaces=None,
route=None,
tried=None,
captured_kwargs=None,
extra_kwargs=None,
):
self.func = func
self.args = args
self.kwargs = kwargs
self.url_name = url_name
self.route = route
self.tried = tried
self.captured_kwargs = captured_kwargs
self.extra_kwargs = extra_kwargs
# If a URLRegexResolver doesn't have a namespace or app_name, it passes
# in an empty value.
self.app_names = [x for x in app_names if x] if app_names else []
self.app_name = ":".join(self.app_names)
self.namespaces = [x for x in namespaces if x] if namespaces else []
self.namespace = ":".join(self.namespaces)
if hasattr(func, "view_class"):
func = func.view_class
if not hasattr(func, "__name__"):
# A class-based view
self._func_path = func.__class__.__module__ + "." + func.__class__.__name__
else:
# A function-based view
self._func_path = func.__module__ + "." + func.__name__
view_path = url_name or self._func_path
self.view_name = ":".join(self.namespaces + [view_path])
def __getitem__(self, index):
return (self.func, self.args, self.kwargs)[index]
def __repr__(self):
if isinstance(self.func, functools.partial):
func = repr(self.func)
else:
func = self._func_path
return (
"ResolverMatch(func=%s, args=%r, kwargs=%r, url_name=%r, "
"app_names=%r, namespaces=%r, route=%r%s%s)"
% (
func,
self.args,
self.kwargs,
self.url_name,
self.app_names,
self.namespaces,
self.route,
f", captured_kwargs={self.captured_kwargs!r}"
if self.captured_kwargs
else "",
f", extra_kwargs={self.extra_kwargs!r}" if self.extra_kwargs else "",
)
)
def __reduce_ex__(self, protocol):
raise PicklingError(f"Cannot pickle {self.__class__.__qualname__}.")
def get_resolver(urlconf=None):
if urlconf is None:
urlconf = settings.ROOT_URLCONF
return _get_cached_resolver(urlconf)
@functools.cache
def _get_cached_resolver(urlconf=None):
return URLResolver(RegexPattern(r"^/"), urlconf)
@functools.cache
def get_ns_resolver(ns_pattern, resolver, converters):
# Build a namespaced resolver for the given parent URLconf pattern.
# This makes it possible to have captured parameters in the parent
# URLconf pattern.
pattern = RegexPattern(ns_pattern)
pattern.converters = dict(converters)
ns_resolver = URLResolver(pattern, resolver.url_patterns)
return URLResolver(RegexPattern(r"^/"), [ns_resolver])
class LocaleRegexDescriptor:
def __init__(self, attr):
self.attr = attr
def __get__(self, instance, cls=None):
"""
Return a compiled regular expression based on the active language.
"""
if instance is None:
return self
# As a performance optimization, if the given regex string is a regular
# string (not a lazily-translated string proxy), compile it once and
# avoid per-language compilation.
pattern = getattr(instance, self.attr)
if isinstance(pattern, str):
instance.__dict__["regex"] = instance._compile(pattern)
return instance.__dict__["regex"]
language_code = get_language()
if language_code not in instance._regex_dict:
instance._regex_dict[language_code] = instance._compile(str(pattern))
return instance._regex_dict[language_code]
class CheckURLMixin:
def describe(self):
"""
Format the URL pattern for display in warning messages.
"""
description = "'{}'".format(self)
if self.name:
description += " [name='{}']".format(self.name)
return description
def _check_pattern_startswith_slash(self):
"""
Check that the pattern does not begin with a forward slash.
"""
regex_pattern = self.regex.pattern
if not settings.APPEND_SLASH:
# Skip check as it can be useful to start a URL pattern with a slash
# when APPEND_SLASH=False.
return []
if regex_pattern.startswith(("/", "^/", "^\\/")) and not regex_pattern.endswith(
"/"
):
warning = Warning(
"Your URL pattern {} has a route beginning with a '/'. Remove this "
"slash as it is unnecessary. If this pattern is targeted in an "
"include(), ensure the include() pattern has a trailing '/'.".format(
self.describe()
),
id="urls.W002",
)
return [warning]
else:
return []
class RegexPattern(CheckURLMixin):
regex = LocaleRegexDescriptor("_regex")
def __init__(self, regex, name=None, is_endpoint=False):
self._regex = regex
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = {}
def match(self, path):
match = (
self.regex.fullmatch(path)
if self._is_endpoint and self.regex.pattern.endswith("$")
else self.regex.search(path)
)
if match:
# If there are any named groups, use those as kwargs, ignoring
# non-named groups. Otherwise, pass all non-named arguments as
# positional arguments.
kwargs = match.groupdict()
args = () if kwargs else match.groups()
kwargs = {k: v for k, v in kwargs.items() if v is not None}
return path[match.end() :], args, kwargs
return None
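    # e.g. with regex "^(?P<slug>[-a-z]+)/" matching "hello/world", match()
    # returns ("world", (), {"slug": "hello"}); with the unnamed-group form
    # "^([-a-z]+)/" it returns ("world", ("hello",), {}).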
def check(self):
warnings = []
warnings.extend(self._check_pattern_startswith_slash())
if not self._is_endpoint:
warnings.extend(self._check_include_trailing_dollar())
return warnings
def _check_include_trailing_dollar(self):
regex_pattern = self.regex.pattern
if regex_pattern.endswith("$") and not regex_pattern.endswith(r"\$"):
return [
Warning(
"Your URL pattern {} uses include with a route ending with a '$'. "
"Remove the dollar from the route to avoid problems including "
"URLs.".format(self.describe()),
id="urls.W001",
)
]
else:
return []
def _compile(self, regex):
"""Compile and return the given regular expression."""
try:
return re.compile(regex)
except re.error as e:
raise ImproperlyConfigured(
'"%s" is not a valid regular expression: %s' % (regex, e)
) from e
def __str__(self):
return str(self._regex)
_PATH_PARAMETER_COMPONENT_RE = _lazy_re_compile(
r"<(?:(?P<converter>[^>:]+):)?(?P<parameter>[^>]+)>"
)
def _route_to_regex(route, is_endpoint=False):
"""
Convert a path pattern into a regular expression. Return the regular
expression and a dictionary mapping the capture names to the converters.
For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)'
and {'pk': <django.urls.converters.IntConverter>}.
"""
original_route = route
parts = ["^"]
converters = {}
while True:
match = _PATH_PARAMETER_COMPONENT_RE.search(route)
if not match:
parts.append(re.escape(route))
break
elif not set(match.group()).isdisjoint(string.whitespace):
raise ImproperlyConfigured(
"URL route '%s' cannot contain whitespace in angle brackets "
"<…>." % original_route
)
parts.append(re.escape(route[: match.start()]))
route = route[match.end() :]
parameter = match["parameter"]
if not parameter.isidentifier():
raise ImproperlyConfigured(
"URL route '%s' uses parameter name %r which isn't a valid "
"Python identifier." % (original_route, parameter)
)
raw_converter = match["converter"]
if raw_converter is None:
# If a converter isn't specified, the default is `str`.
raw_converter = "str"
try:
converter = get_converter(raw_converter)
except KeyError as e:
raise ImproperlyConfigured(
"URL route %r uses invalid converter %r."
% (original_route, raw_converter)
) from e
converters[parameter] = converter
parts.append("(?P<" + parameter + ">" + converter.regex + ")")
if is_endpoint:
parts.append(r"\Z")
return "".join(parts), converters
class RoutePattern(CheckURLMixin):
regex = LocaleRegexDescriptor("_route")
def __init__(self, route, name=None, is_endpoint=False):
self._route = route
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = _route_to_regex(str(route), is_endpoint)[1]
def match(self, path):
match = self.regex.search(path)
if match:
# RoutePattern doesn't allow non-named groups so args are ignored.
kwargs = match.groupdict()
for key, value in kwargs.items():
converter = self.converters[key]
try:
kwargs[key] = converter.to_python(value)
except ValueError:
return None
return path[match.end() :], (), kwargs
return None
def check(self):
warnings = self._check_pattern_startswith_slash()
route = self._route
if "(?P<" in route or route.startswith("^") or route.endswith("$"):
warnings.append(
Warning(
"Your URL pattern {} has a route that contains '(?P<', begins "
"with a '^', or ends with a '$'. This was likely an oversight "
"when migrating to django.urls.path().".format(self.describe()),
id="2_0.W001",
)
)
return warnings
def _compile(self, route):
return re.compile(_route_to_regex(route, self._is_endpoint)[0])
def __str__(self):
return str(self._route)
class LocalePrefixPattern:
def __init__(self, prefix_default_language=True):
self.prefix_default_language = prefix_default_language
self.converters = {}
@property
def regex(self):
# This is only used by reverse() and cached in _reverse_dict.
return re.compile(re.escape(self.language_prefix))
@property
def language_prefix(self):
language_code = get_language() or settings.LANGUAGE_CODE
if language_code == settings.LANGUAGE_CODE and not self.prefix_default_language:
return ""
else:
return "%s/" % language_code
def match(self, path):
language_prefix = self.language_prefix
if path.startswith(language_prefix):
return path.removeprefix(language_prefix), (), {}
return None
def check(self):
return []
def describe(self):
return "'{}'".format(self)
def __str__(self):
return self.language_prefix
class URLPattern:
def __init__(self, pattern, callback, default_args=None, name=None):
self.pattern = pattern
self.callback = callback # the view
self.default_args = default_args or {}
self.name = name
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.pattern.describe())
def check(self):
warnings = self._check_pattern_name()
warnings.extend(self.pattern.check())
warnings.extend(self._check_callback())
return warnings
def _check_pattern_name(self):
"""
Check that the pattern name does not contain a colon.
"""
if self.pattern.name is not None and ":" in self.pattern.name:
warning = Warning(
"Your URL pattern {} has a name including a ':'. Remove the colon, to "
"avoid ambiguous namespace references.".format(self.pattern.describe()),
id="urls.W003",
)
return [warning]
else:
return []
def _check_callback(self):
from django.views import View
view = self.callback
if inspect.isclass(view) and issubclass(view, View):
return [
Error(
"Your URL pattern %s has an invalid view, pass %s.as_view() "
"instead of %s."
% (
self.pattern.describe(),
view.__name__,
view.__name__,
),
id="urls.E009",
)
]
return []
def resolve(self, path):
match = self.pattern.match(path)
if match:
new_path, args, captured_kwargs = match
# Pass any default args as **kwargs.
kwargs = {**captured_kwargs, **self.default_args}
return ResolverMatch(
self.callback,
args,
kwargs,
self.pattern.name,
route=str(self.pattern),
captured_kwargs=captured_kwargs,
extra_kwargs=self.default_args,
)
@cached_property
def lookup_str(self):
"""
A string that identifies the view (e.g. 'path.to.view_function' or
'path.to.ClassBasedView').
"""
callback = self.callback
if isinstance(callback, functools.partial):
callback = callback.func
if hasattr(callback, "view_class"):
callback = callback.view_class
elif not hasattr(callback, "__name__"):
return callback.__module__ + "." + callback.__class__.__name__
return callback.__module__ + "." + callback.__qualname__
class URLResolver:
def __init__(
self, pattern, urlconf_name, default_kwargs=None, app_name=None, namespace=None
):
self.pattern = pattern
# urlconf_name is the dotted Python path to the module defining
# urlpatterns. It may also be an object with an urlpatterns attribute
# or urlpatterns itself.
self.urlconf_name = urlconf_name
self.callback = None
self.default_kwargs = default_kwargs or {}
self.namespace = namespace
self.app_name = app_name
self._reverse_dict = {}
self._namespace_dict = {}
self._app_dict = {}
# set of dotted paths to all functions and classes that are used in
# urlpatterns
self._callback_strs = set()
self._populated = False
self._local = Local()
def __repr__(self):
if isinstance(self.urlconf_name, list) and self.urlconf_name:
# Don't bother to output the whole list, it can be huge
urlconf_repr = "<%s list>" % self.urlconf_name[0].__class__.__name__
else:
urlconf_repr = repr(self.urlconf_name)
return "<%s %s (%s:%s) %s>" % (
self.__class__.__name__,
urlconf_repr,
self.app_name,
self.namespace,
self.pattern.describe(),
)
def check(self):
messages = []
for pattern in self.url_patterns:
messages.extend(check_resolver(pattern))
messages.extend(self._check_custom_error_handlers())
return messages or self.pattern.check()
def _check_custom_error_handlers(self):
messages = []
# All handlers take (request, exception) arguments except handler500
# which takes (request).
for status_code, num_parameters in [(400, 2), (403, 2), (404, 2), (500, 1)]:
try:
handler = self.resolve_error_handler(status_code)
except (ImportError, ViewDoesNotExist) as e:
path = getattr(self.urlconf_module, "handler%s" % status_code)
msg = (
"The custom handler{status_code} view '{path}' could not be "
"imported."
).format(status_code=status_code, path=path)
messages.append(Error(msg, hint=str(e), id="urls.E008"))
continue
signature = inspect.signature(handler)
args = [None] * num_parameters
try:
signature.bind(*args)
except TypeError:
msg = (
"The custom handler{status_code} view '{path}' does not "
"take the correct number of arguments ({args})."
).format(
status_code=status_code,
path=handler.__module__ + "." + handler.__qualname__,
args="request, exception" if num_parameters == 2 else "request",
)
messages.append(Error(msg, id="urls.E007"))
return messages
def _populate(self):
# Short-circuit if called recursively in this thread to prevent
# infinite recursion. Concurrent threads may call this at the same
# time and will need to continue, so set 'populating' on a
# thread-local variable.
if getattr(self._local, "populating", False):
return
try:
self._local.populating = True
lookups = MultiValueDict()
namespaces = {}
apps = {}
language_code = get_language()
for url_pattern in reversed(self.url_patterns):
p_pattern = url_pattern.pattern.regex.pattern
p_pattern = p_pattern.removeprefix("^")
if isinstance(url_pattern, URLPattern):
self._callback_strs.add(url_pattern.lookup_str)
bits = normalize(url_pattern.pattern.regex.pattern)
lookups.appendlist(
url_pattern.callback,
(
bits,
p_pattern,
url_pattern.default_args,
url_pattern.pattern.converters,
),
)
if url_pattern.name is not None:
lookups.appendlist(
url_pattern.name,
(
bits,
p_pattern,
url_pattern.default_args,
url_pattern.pattern.converters,
),
)
else: # url_pattern is a URLResolver.
url_pattern._populate()
if url_pattern.app_name:
apps.setdefault(url_pattern.app_name, []).append(
url_pattern.namespace
)
namespaces[url_pattern.namespace] = (p_pattern, url_pattern)
else:
for name in url_pattern.reverse_dict:
for (
matches,
pat,
defaults,
converters,
) in url_pattern.reverse_dict.getlist(name):
new_matches = normalize(p_pattern + pat)
lookups.appendlist(
name,
(
new_matches,
p_pattern + pat,
{**defaults, **url_pattern.default_kwargs},
{
**self.pattern.converters,
**url_pattern.pattern.converters,
**converters,
},
),
)
for namespace, (
prefix,
sub_pattern,
) in url_pattern.namespace_dict.items():
current_converters = url_pattern.pattern.converters
sub_pattern.pattern.converters.update(current_converters)
namespaces[namespace] = (p_pattern + prefix, sub_pattern)
for app_name, namespace_list in url_pattern.app_dict.items():
apps.setdefault(app_name, []).extend(namespace_list)
self._callback_strs.update(url_pattern._callback_strs)
self._namespace_dict[language_code] = namespaces
self._app_dict[language_code] = apps
self._reverse_dict[language_code] = lookups
self._populated = True
finally:
self._local.populating = False
@property
def reverse_dict(self):
language_code = get_language()
if language_code not in self._reverse_dict:
self._populate()
return self._reverse_dict[language_code]
@property
def namespace_dict(self):
language_code = get_language()
if language_code not in self._namespace_dict:
self._populate()
return self._namespace_dict[language_code]
@property
def app_dict(self):
language_code = get_language()
if language_code not in self._app_dict:
self._populate()
return self._app_dict[language_code]
@staticmethod
def _extend_tried(tried, pattern, sub_tried=None):
if sub_tried is None:
tried.append([pattern])
else:
tried.extend([pattern, *t] for t in sub_tried)
@staticmethod
def _join_route(route1, route2):
"""Join two routes, without the starting ^ in the second route."""
if not route1:
return route2
route2 = route2.removeprefix("^")
return route1 + route2
def _is_callback(self, name):
if not self._populated:
self._populate()
return name in self._callback_strs
def resolve(self, path):
path = str(path) # path may be a reverse_lazy object
tried = []
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
for pattern in self.url_patterns:
try:
sub_match = pattern.resolve(new_path)
except Resolver404 as e:
self._extend_tried(tried, pattern, e.args[0].get("tried"))
else:
if sub_match:
# Merge captured arguments in match with submatch
sub_match_dict = {**kwargs, **self.default_kwargs}
# Update the sub_match_dict with the kwargs from the sub_match.
sub_match_dict.update(sub_match.kwargs)
# If there are *any* named groups, ignore all non-named groups.
# Otherwise, pass all non-named arguments as positional
# arguments.
sub_match_args = sub_match.args
if not sub_match_dict:
sub_match_args = args + sub_match.args
current_route = (
""
if isinstance(pattern, URLPattern)
else str(pattern.pattern)
)
self._extend_tried(tried, pattern, sub_match.tried)
return ResolverMatch(
sub_match.func,
sub_match_args,
sub_match_dict,
sub_match.url_name,
[self.app_name] + sub_match.app_names,
[self.namespace] + sub_match.namespaces,
self._join_route(current_route, sub_match.route),
tried,
captured_kwargs=sub_match.captured_kwargs,
extra_kwargs={
**self.default_kwargs,
**sub_match.extra_kwargs,
},
)
tried.append([pattern])
raise Resolver404({"tried": tried, "path": new_path})
raise Resolver404({"path": path})
@cached_property
def urlconf_module(self):
if isinstance(self.urlconf_name, str):
return import_module(self.urlconf_name)
else:
return self.urlconf_name
@cached_property
def url_patterns(self):
# urlconf_module might be a valid set of patterns, so we default to it
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
try:
iter(patterns)
except TypeError as e:
msg = (
"The included URLconf '{name}' does not appear to have "
"any patterns in it. If you see the 'urlpatterns' variable "
"with valid patterns in the file then the issue is probably "
"caused by a circular import."
)
raise ImproperlyConfigured(msg.format(name=self.urlconf_name)) from e
return patterns
def resolve_error_handler(self, view_type):
callback = getattr(self.urlconf_module, "handler%s" % view_type, None)
if not callback:
# No handler specified in file; use lazy import, since
# django.conf.urls imports this file.
from django.conf import urls
callback = getattr(urls, "handler%s" % view_type)
return get_callable(callback)
def reverse(self, lookup_view, *args, **kwargs):
return self._reverse_with_prefix(lookup_view, "", *args, **kwargs)
def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
if args and kwargs:
raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
if not self._populated:
self._populate()
possibilities = self.reverse_dict.getlist(lookup_view)
for possibility, pattern, defaults, converters in possibilities:
for result, params in possibility:
if args:
if len(args) != len(params):
continue
candidate_subs = dict(zip(params, args))
else:
if set(kwargs).symmetric_difference(params).difference(defaults):
continue
matches = True
for k, v in defaults.items():
if k in params:
continue
if kwargs.get(k, v) != v:
matches = False
break
if not matches:
continue
candidate_subs = kwargs
# Convert the candidate subs to text using Converter.to_url().
text_candidate_subs = {}
match = True
for k, v in candidate_subs.items():
if k in converters:
try:
text_candidate_subs[k] = converters[k].to_url(v)
except ValueError:
match = False
break
else:
text_candidate_subs[k] = str(v)
if not match:
continue
# WSGI provides decoded URLs, without %xx escapes, and the URL
# resolver operates on such URLs. First substitute arguments
# without quoting to build a decoded URL and look for a match.
# Then, if we have a match, redo the substitution with quoted
# arguments in order to return a properly encoded URL.
candidate_pat = _prefix.replace("%", "%%") + result
if re.search(
"^%s%s" % (re.escape(_prefix), pattern),
candidate_pat % text_candidate_subs,
):
# safe characters from `pchar` definition of RFC 3986
url = quote(
candidate_pat % text_candidate_subs,
safe=RFC3986_SUBDELIMS + "/~:@",
)
# Don't allow construction of scheme relative urls.
return escape_leading_slashes(url)
# lookup_view can be URL name or callable, but callables are not
# friendly in error messages.
m = getattr(lookup_view, "__module__", None)
n = getattr(lookup_view, "__name__", None)
if m is not None and n is not None:
lookup_view_s = "%s.%s" % (m, n)
else:
lookup_view_s = lookup_view
patterns = [pattern for (_, pattern, _, _) in possibilities]
if patterns:
if args:
arg_msg = "arguments '%s'" % (args,)
elif kwargs:
arg_msg = "keyword arguments '%s'" % kwargs
else:
arg_msg = "no arguments"
msg = "Reverse for '%s' with %s not found. %d pattern(s) tried: %s" % (
lookup_view_s,
arg_msg,
len(patterns),
patterns,
)
else:
msg = (
"Reverse for '%(view)s' not found. '%(view)s' is not "
"a valid view function or pattern name." % {"view": lookup_view_s}
)
raise NoReverseMatch(msg)
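# Usage sketch (assumes a configured project whose ROOT_URLCONF contains a
# pattern such as path("articles/<int:year>/", ..., name="article-detail")):
#
#     resolver = get_resolver()
#     match = resolver.resolve("/articles/2024/")
#     match.func, match.kwargs  # -> the view callable, {"year": 2024}
#     resolver._reverse_with_prefix("article-detail", "/", year=2024)
#     # -> "/articles/2024/"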
|
a40a5ae254b60b1fb4f95ee3e322eeee162c2f009f2e8161634e2b426ce1d62c | from django.core.exceptions import ValidationError
from django.forms import Form
from django.forms.fields import BooleanField, IntegerField
from django.forms.renderers import get_default_renderer
from django.forms.utils import ErrorList, RenderableFormMixin
from django.forms.widgets import CheckboxInput, HiddenInput, NumberInput
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
__all__ = ("BaseFormSet", "formset_factory", "all_valid")
# special field names
TOTAL_FORM_COUNT = "TOTAL_FORMS"
INITIAL_FORM_COUNT = "INITIAL_FORMS"
MIN_NUM_FORM_COUNT = "MIN_NUM_FORMS"
MAX_NUM_FORM_COUNT = "MAX_NUM_FORMS"
ORDERING_FIELD_NAME = "ORDER"
DELETION_FIELD_NAME = "DELETE"
# default minimum number of forms in a formset
DEFAULT_MIN_NUM = 0
# default maximum number of forms in a formset, to prevent memory exhaustion
DEFAULT_MAX_NUM = 1000
class ManagementForm(Form):
"""
Keep track of how many form instances are displayed on the page. If adding
new forms via JavaScript, you should increment the count field of this form
as well.
"""
TOTAL_FORMS = IntegerField(widget=HiddenInput)
INITIAL_FORMS = IntegerField(widget=HiddenInput)
# MIN_NUM_FORM_COUNT and MAX_NUM_FORM_COUNT are output with the rest of the
# management form, but only for the convenience of client-side code. The
# POST value of them returned from the client is not checked.
MIN_NUM_FORMS = IntegerField(required=False, widget=HiddenInput)
MAX_NUM_FORMS = IntegerField(required=False, widget=HiddenInput)
def clean(self):
cleaned_data = super().clean()
# When the management form is invalid, we don't know how many forms
# were submitted.
cleaned_data.setdefault(TOTAL_FORM_COUNT, 0)
cleaned_data.setdefault(INITIAL_FORM_COUNT, 0)
return cleaned_data
class BaseFormSet(RenderableFormMixin):
"""
A collection of instances of the same Form class.
"""
deletion_widget = CheckboxInput
ordering_widget = NumberInput
default_error_messages = {
"missing_management_form": _(
"ManagementForm data is missing or has been tampered with. Missing fields: "
"%(field_names)s. You may need to file a bug report if the issue persists."
),
"too_many_forms": ngettext_lazy(
"Please submit at most %(num)d form.",
"Please submit at most %(num)d forms.",
"num",
),
"too_few_forms": ngettext_lazy(
"Please submit at least %(num)d form.",
"Please submit at least %(num)d forms.",
"num",
),
}
template_name_div = "django/forms/formsets/div.html"
template_name_p = "django/forms/formsets/p.html"
template_name_table = "django/forms/formsets/table.html"
template_name_ul = "django/forms/formsets/ul.html"
def __init__(
self,
data=None,
files=None,
auto_id="id_%s",
prefix=None,
initial=None,
error_class=ErrorList,
form_kwargs=None,
error_messages=None,
):
self.is_bound = data is not None or files is not None
self.prefix = prefix or self.get_default_prefix()
self.auto_id = auto_id
self.data = data or {}
self.files = files or {}
self.initial = initial
self.form_kwargs = form_kwargs or {}
self.error_class = error_class
self._errors = None
self._non_form_errors = None
messages = {}
for cls in reversed(type(self).__mro__):
messages.update(getattr(cls, "default_error_messages", {}))
if error_messages is not None:
messages.update(error_messages)
self.error_messages = messages
def __iter__(self):
"""Yield the forms in the order they should be rendered."""
return iter(self.forms)
def __getitem__(self, index):
"""Return the form at the given index, based on the rendering order."""
return self.forms[index]
def __len__(self):
return len(self.forms)
def __bool__(self):
"""
Return True since all formsets have a management form which is not
included in the length.
"""
return True
def __repr__(self):
if self._errors is None:
is_valid = "Unknown"
else:
is_valid = (
self.is_bound
and not self._non_form_errors
and not any(form_errors for form_errors in self._errors)
)
return "<%s: bound=%s valid=%s total_forms=%s>" % (
self.__class__.__qualname__,
self.is_bound,
is_valid,
self.total_form_count(),
)
@cached_property
def management_form(self):
"""Return the ManagementForm instance for this FormSet."""
if self.is_bound:
form = ManagementForm(
self.data,
auto_id=self.auto_id,
prefix=self.prefix,
renderer=self.renderer,
)
form.full_clean()
else:
form = ManagementForm(
auto_id=self.auto_id,
prefix=self.prefix,
initial={
TOTAL_FORM_COUNT: self.total_form_count(),
INITIAL_FORM_COUNT: self.initial_form_count(),
MIN_NUM_FORM_COUNT: self.min_num,
MAX_NUM_FORM_COUNT: self.max_num,
},
renderer=self.renderer,
)
return form
def total_form_count(self):
"""Return the total number of forms in this FormSet."""
if self.is_bound:
# return absolute_max if it is lower than the actual total form
# count in the data; this is DoS protection to prevent clients
# from forcing the server to instantiate arbitrary numbers of
# forms
return min(
self.management_form.cleaned_data[TOTAL_FORM_COUNT], self.absolute_max
)
else:
initial_forms = self.initial_form_count()
total_forms = max(initial_forms, self.min_num) + self.extra
# Allow all existing related objects/inlines to be displayed,
# but don't allow extra beyond max_num.
if initial_forms > self.max_num >= 0:
total_forms = initial_forms
elif total_forms > self.max_num >= 0:
total_forms = self.max_num
return total_forms
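    # e.g. unbound with min_num=0 and extra=2:
    #     initial_form_count()=3, max_num=4 -> 4 forms (only one extra fits)
    #     initial_form_count()=5, max_num=4 -> 5 forms (all initial, no extra)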
def initial_form_count(self):
"""Return the number of forms that are required in this FormSet."""
if self.is_bound:
return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
else:
# Use the length of the initial data if it's there, 0 otherwise.
initial_forms = len(self.initial) if self.initial else 0
return initial_forms
@cached_property
def forms(self):
"""Instantiate forms at first property access."""
# DoS protection is included in total_form_count()
return [
self._construct_form(i, **self.get_form_kwargs(i))
for i in range(self.total_form_count())
]
def get_form_kwargs(self, index):
"""
Return additional keyword arguments for each individual formset form.
index will be None if the form being constructed is a new empty
form.
"""
return self.form_kwargs.copy()
def _construct_form(self, i, **kwargs):
"""Instantiate and return the i-th form instance in a formset."""
defaults = {
"auto_id": self.auto_id,
"prefix": self.add_prefix(i),
"error_class": self.error_class,
# Don't render the HTML 'required' attribute as it may cause
# incorrect validation for extra, optional, and deleted
# forms in the formset.
"use_required_attribute": False,
"renderer": self.renderer,
}
if self.is_bound:
defaults["data"] = self.data
defaults["files"] = self.files
if self.initial and "initial" not in kwargs:
try:
defaults["initial"] = self.initial[i]
except IndexError:
pass
# Allow extra forms to be empty, unless they're part of
# the minimum forms.
if i >= self.initial_form_count() and i >= self.min_num:
defaults["empty_permitted"] = True
defaults.update(kwargs)
form = self.form(**defaults)
self.add_fields(form, i)
return form
@property
def initial_forms(self):
"""Return a list of all the initial forms in this formset."""
return self.forms[: self.initial_form_count()]
@property
def extra_forms(self):
"""Return a list of all the extra forms in this formset."""
return self.forms[self.initial_form_count() :]
@property
def empty_form(self):
form_kwargs = {
**self.get_form_kwargs(None),
"auto_id": self.auto_id,
"prefix": self.add_prefix("__prefix__"),
"empty_permitted": True,
"use_required_attribute": False,
"renderer": self.renderer,
}
form = self.form(**form_kwargs)
self.add_fields(form, None)
return form
@property
def cleaned_data(self):
"""
Return a list of form.cleaned_data dicts for every form in self.forms.
"""
if not self.is_valid():
raise AttributeError(
"'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__
)
return [form.cleaned_data for form in self.forms]
@property
def deleted_forms(self):
"""Return a list of forms that have been marked for deletion."""
if not self.is_valid() or not self.can_delete:
return []
# construct _deleted_form_indexes which is just a list of form indexes
# that have had their deletion widget set to True
if not hasattr(self, "_deleted_form_indexes"):
self._deleted_form_indexes = []
for i, form in enumerate(self.forms):
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
if self._should_delete_form(form):
self._deleted_form_indexes.append(i)
return [self.forms[i] for i in self._deleted_form_indexes]
@property
def ordered_forms(self):
"""
        Return a list of forms in the order specified by the incoming data.
Raise an AttributeError if ordering is not allowed.
"""
if not self.is_valid() or not self.can_order:
raise AttributeError(
"'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__
)
# Construct _ordering, which is a list of (form_index, order_field_value)
# tuples. After constructing this list, we'll sort it by order_field_value
# so we have a way to get to the form indexes in the order specified
# by the form data.
if not hasattr(self, "_ordering"):
self._ordering = []
for i, form in enumerate(self.forms):
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
# don't add data marked for deletion to self.ordered_data
if self.can_delete and self._should_delete_form(form):
continue
self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
# After we're done populating self._ordering, sort it.
# A sort function to order things numerically ascending, but
# None should be sorted below anything else. Allowing None as
# a comparison value makes it so we can leave ordering fields
# blank.
def compare_ordering_key(k):
if k[1] is None:
return (1, 0) # +infinity, larger than any number
return (0, k[1])
self._ordering.sort(key=compare_ordering_key)
# Return a list of form.cleaned_data dicts in the order specified by
# the form data.
return [self.forms[i[0]] for i in self._ordering]
@classmethod
def get_default_prefix(cls):
return "form"
@classmethod
def get_deletion_widget(cls):
return cls.deletion_widget
@classmethod
def get_ordering_widget(cls):
return cls.ordering_widget
def non_form_errors(self):
"""
Return an ErrorList of errors that aren't associated with a particular
form -- i.e., from formset.clean(). Return an empty ErrorList if there
are none.
"""
if self._non_form_errors is None:
self.full_clean()
return self._non_form_errors
@property
def errors(self):
"""Return a list of form.errors for every form in self.forms."""
if self._errors is None:
self.full_clean()
return self._errors
def total_error_count(self):
"""Return the number of errors across all forms in the formset."""
return len(self.non_form_errors()) + sum(
len(form_errors) for form_errors in self.errors
)
def _should_delete_form(self, form):
"""Return whether or not the form was marked for deletion."""
return form.cleaned_data.get(DELETION_FIELD_NAME, False)
def is_valid(self):
"""Return True if every form in self.forms is valid."""
if not self.is_bound:
return False
# Accessing errors triggers a full clean the first time only.
self.errors
# List comprehension ensures is_valid() is called for all forms.
# Forms due to be deleted shouldn't cause the formset to be invalid.
forms_valid = all(
[
form.is_valid()
for form in self.forms
if not (self.can_delete and self._should_delete_form(form))
]
)
return forms_valid and not self.non_form_errors()
def full_clean(self):
"""
Clean all of self.data and populate self._errors and
self._non_form_errors.
"""
self._errors = []
self._non_form_errors = self.error_class(
error_class="nonform", renderer=self.renderer
)
empty_forms_count = 0
if not self.is_bound: # Stop further processing.
return
if not self.management_form.is_valid():
error = ValidationError(
self.error_messages["missing_management_form"],
params={
"field_names": ", ".join(
self.management_form.add_prefix(field_name)
for field_name in self.management_form.errors
),
},
code="missing_management_form",
)
self._non_form_errors.append(error)
for i, form in enumerate(self.forms):
# Empty forms are unchanged forms beyond those with initial data.
if not form.has_changed() and i >= self.initial_form_count():
empty_forms_count += 1
# Accessing errors calls full_clean() if necessary.
# _should_delete_form() requires cleaned_data.
form_errors = form.errors
if self.can_delete and self._should_delete_form(form):
continue
self._errors.append(form_errors)
try:
if (
self.validate_max
and self.total_form_count() - len(self.deleted_forms) > self.max_num
) or self.management_form.cleaned_data[
TOTAL_FORM_COUNT
] > self.absolute_max:
raise ValidationError(
self.error_messages["too_many_forms"] % {"num": self.max_num},
code="too_many_forms",
)
if (
self.validate_min
and self.total_form_count()
- len(self.deleted_forms)
- empty_forms_count
< self.min_num
):
raise ValidationError(
self.error_messages["too_few_forms"] % {"num": self.min_num},
code="too_few_forms",
)
# Give self.clean() a chance to do cross-form validation.
self.clean()
except ValidationError as e:
self._non_form_errors = self.error_class(
e.error_list,
error_class="nonform",
renderer=self.renderer,
)
def clean(self):
"""
Hook for doing any extra formset-wide cleaning after Form.clean() has
been called on every form. Any ValidationError raised by this method
will not be associated with a particular form; it will be accessible
        via formset.non_form_errors().
"""
pass
def has_changed(self):
"""Return True if data in any form differs from initial."""
return any(form.has_changed() for form in self)
def add_fields(self, form, index):
"""A hook for adding extra fields on to each form instance."""
initial_form_count = self.initial_form_count()
if self.can_order:
# Only pre-fill the ordering field for initial forms.
if index is not None and index < initial_form_count:
form.fields[ORDERING_FIELD_NAME] = IntegerField(
label=_("Order"),
initial=index + 1,
required=False,
widget=self.get_ordering_widget(),
)
else:
form.fields[ORDERING_FIELD_NAME] = IntegerField(
label=_("Order"),
required=False,
widget=self.get_ordering_widget(),
)
if self.can_delete and (self.can_delete_extra or index < initial_form_count):
form.fields[DELETION_FIELD_NAME] = BooleanField(
label=_("Delete"),
required=False,
widget=self.get_deletion_widget(),
)
def add_prefix(self, index):
return "%s-%s" % (self.prefix, index)
def is_multipart(self):
"""
Return True if the formset needs to be multipart, i.e. it
has FileInput, or False otherwise.
"""
if self.forms:
return self.forms[0].is_multipart()
else:
return self.empty_form.is_multipart()
@property
def media(self):
# All the forms on a FormSet are the same, so you only need to
# interrogate the first form for media.
if self.forms:
return self.forms[0].media
else:
return self.empty_form.media
@property
def template_name(self):
return self.renderer.formset_template_name
def get_context(self):
return {"formset": self}
def formset_factory(
form,
formset=BaseFormSet,
extra=1,
can_order=False,
can_delete=False,
max_num=None,
validate_max=False,
min_num=None,
validate_min=False,
absolute_max=None,
can_delete_extra=True,
renderer=None,
):
"""Return a FormSet for the given form class."""
if min_num is None:
min_num = DEFAULT_MIN_NUM
if max_num is None:
max_num = DEFAULT_MAX_NUM
# absolute_max is a hard limit on forms instantiated, to prevent
# memory-exhaustion attacks. Default to max_num + DEFAULT_MAX_NUM
# (which is 2 * DEFAULT_MAX_NUM if max_num is None in the first place).
if absolute_max is None:
absolute_max = max_num + DEFAULT_MAX_NUM
if max_num > absolute_max:
raise ValueError("'absolute_max' must be greater or equal to 'max_num'.")
attrs = {
"form": form,
"extra": extra,
"can_order": can_order,
"can_delete": can_delete,
"can_delete_extra": can_delete_extra,
"min_num": min_num,
"max_num": max_num,
"absolute_max": absolute_max,
"validate_min": validate_min,
"validate_max": validate_max,
"renderer": renderer or get_default_renderer(),
}
return type(form.__name__ + "FormSet", (formset,), attrs)
def all_valid(formsets):
"""Validate every formset and return True if all are valid."""
# List comprehension ensures is_valid() is called for all formsets.
return all([formset.is_valid() for formset in formsets])
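# Usage sketch (ArticleForm is a hypothetical Form subclass; rendering and
# validation require configured Django settings):
#
#     ArticleFormSet = formset_factory(ArticleForm, extra=2, can_delete=True)
#     formset = ArticleFormSet(data=request.POST)
#     if formset.is_valid():
#         for cleaned in formset.cleaned_data:
#             ...  # each entry is one form's cleaned_data dict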
|
6886d96bcf5bfb371d40e92f45c10185da5c6002f60ad19bc73e0a40e095170e | """
HTML Widget classes
"""
import copy
import datetime
import warnings
from collections import defaultdict
from graphlib import CycleError, TopologicalSorter
from itertools import chain
from django.forms.utils import to_current_timezone
from django.templatetags.static import static
from django.utils import formats
from django.utils.dates import MONTHS
from django.utils.formats import get_format
from django.utils.html import format_html, html_safe
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from .renderers import get_default_renderer
__all__ = (
"Media",
"MediaDefiningClass",
"Widget",
"TextInput",
"NumberInput",
"EmailInput",
"URLInput",
"PasswordInput",
"HiddenInput",
"MultipleHiddenInput",
"FileInput",
"ClearableFileInput",
"Textarea",
"DateInput",
"DateTimeInput",
"TimeInput",
"CheckboxInput",
"Select",
"NullBooleanSelect",
"SelectMultiple",
"RadioSelect",
"CheckboxSelectMultiple",
"MultiWidget",
"SplitDateTimeWidget",
"SplitHiddenDateTimeWidget",
"SelectDateWidget",
)
MEDIA_TYPES = ("css", "js")
class MediaOrderConflictWarning(RuntimeWarning):
pass
@html_safe
class Media:
def __init__(self, media=None, css=None, js=None):
if media is not None:
css = getattr(media, "css", {})
js = getattr(media, "js", [])
else:
if css is None:
css = {}
if js is None:
js = []
self._css_lists = [css]
self._js_lists = [js]
def __repr__(self):
return "Media(css=%r, js=%r)" % (self._css, self._js)
def __str__(self):
return self.render()
@property
def _css(self):
css = defaultdict(list)
for css_list in self._css_lists:
for medium, sublist in css_list.items():
css[medium].append(sublist)
return {medium: self.merge(*lists) for medium, lists in css.items()}
@property
def _js(self):
return self.merge(*self._js_lists)
def render(self):
return mark_safe(
"\n".join(
chain.from_iterable(
getattr(self, "render_" + name)() for name in MEDIA_TYPES
)
)
)
def render_js(self):
return [
path.__html__()
if hasattr(path, "__html__")
else format_html('<script src="{}"></script>', self.absolute_path(path))
for path in self._js
]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css)
return chain.from_iterable(
[
path.__html__()
if hasattr(path, "__html__")
else format_html(
'<link href="{}" media="{}" rel="stylesheet">',
self.absolute_path(path),
medium,
)
for path in self._css[medium]
]
for medium in media
)
def absolute_path(self, path):
"""
Given a relative or absolute path to a static asset, return an absolute
path. An absolute path will be returned unchanged while a relative path
will be passed to django.templatetags.static.static().
"""
if path.startswith(("http://", "https://", "/")):
return path
return static(path)
def __getitem__(self, name):
"""Return a Media object that only contains media of the given type."""
if name in MEDIA_TYPES:
return Media(**{str(name): getattr(self, "_" + name)})
raise KeyError('Unknown media type "%s"' % name)
@staticmethod
def merge(*lists):
"""
Merge lists while trying to keep the relative order of the elements.
Warn if the lists have the same elements in a different relative order.
For static assets it can be important to have them included in the DOM
in a certain order. In JavaScript you may not be able to reference a
global or in CSS you might want to override a style.
"""
ts = TopologicalSorter()
for head, *tail in filter(None, lists):
ts.add(head) # Ensure that the first items are included.
for item in tail:
if head != item: # Avoid circular dependency to self.
ts.add(item, head)
head = item
try:
return list(ts.static_order())
except CycleError:
warnings.warn(
"Detected duplicate Media files in an opposite order: {}".format(
", ".join(repr(list_) for list_ in lists)
),
MediaOrderConflictWarning,
)
return list(dict.fromkeys(chain.from_iterable(filter(None, lists))))
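    # e.g. merge(["a.js", "b.js"], ["b.js", "c.js"]) -> ["a.js", "b.js", "c.js"],
    # while merge(["a.js", "b.js"], ["b.js", "a.js"]) hits the CycleError branch,
    # warns, and falls back to first-seen order: ["a.js", "b.js"].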
def __add__(self, other):
combined = Media()
combined._css_lists = self._css_lists[:]
combined._js_lists = self._js_lists[:]
for item in other._css_lists:
if item and item not in self._css_lists:
combined._css_lists.append(item)
for item in other._js_lists:
if item and item not in self._js_lists:
combined._js_lists.append(item)
return combined
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
sup_cls = super(cls, self)
try:
base = sup_cls.media
except AttributeError:
base = Media()
# Get the media definition for this class
definition = getattr(cls, "Media", None)
if definition:
extend = getattr(definition, "extend", True)
if extend:
if extend is True:
m = base
else:
m = Media()
for medium in extend:
m += base[medium]
return m + Media(definition)
return Media(definition)
return base
return property(_media)
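# Declaration sketch (CalendarWidget and the asset names are illustrative):
#
#     class CalendarWidget(TextInput):
#         class Media:
#             css = {"all": ["calendar.css"]}
#             js = ["calendar.js"]
#
#     CalendarWidget().media then renders the corresponding <link> and
#     <script> tags; setting extend = False on the inner Media class drops
#     any assets inherited from parent classes.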
class MediaDefiningClass(type):
"""
Metaclass for classes that can have media definitions.
"""
def __new__(mcs, name, bases, attrs):
new_class = super().__new__(mcs, name, bases, attrs)
if "media" not in attrs:
new_class.media = media_property(new_class)
return new_class
class Widget(metaclass=MediaDefiningClass):
    needs_multipart_form = False  # Whether this widget needs a multipart form.
is_localized = False
is_required = False
supports_microseconds = True
use_fieldset = False
def __init__(self, attrs=None):
self.attrs = {} if attrs is None else attrs.copy()
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
@property
def is_hidden(self):
return self.input_type == "hidden" if hasattr(self, "input_type") else False
def subwidgets(self, name, value, attrs=None):
context = self.get_context(name, value, attrs)
yield context["widget"]
def format_value(self, value):
"""
Return a value as it should appear when rendered in a template.
"""
if value == "" or value is None:
return None
if self.is_localized:
return formats.localize_input(value)
return str(value)
def get_context(self, name, value, attrs):
return {
"widget": {
"name": name,
"is_hidden": self.is_hidden,
"required": self.is_required,
"value": self.format_value(value),
"attrs": self.build_attrs(self.attrs, attrs),
"template_name": self.template_name,
},
}
def render(self, name, value, attrs=None, renderer=None):
"""Render the widget as an HTML string."""
context = self.get_context(name, value, attrs)
return self._render(self.template_name, context, renderer)
def _render(self, template_name, context, renderer=None):
if renderer is None:
renderer = get_default_renderer()
return mark_safe(renderer.render(template_name, context))
def build_attrs(self, base_attrs, extra_attrs=None):
"""Build an attribute dictionary."""
return {**base_attrs, **(extra_attrs or {})}
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, return the value
of this widget or None if it's not provided.
"""
return data.get(name)
def value_omitted_from_data(self, data, files, name):
return name not in data
def id_for_label(self, id_):
"""
Return the HTML ID attribute of this Widget for use by a <label>, given
the ID of the field. Return an empty string if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
def use_required_attribute(self, initial):
return not self.is_hidden
class Input(Widget):
"""
Base class for all <input> widgets.
"""
input_type = None # Subclasses must define this.
template_name = "django/forms/widgets/input.html"
def __init__(self, attrs=None):
if attrs is not None:
attrs = attrs.copy()
self.input_type = attrs.pop("type", self.input_type)
super().__init__(attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context["widget"]["type"] = self.input_type
return context
class TextInput(Input):
input_type = "text"
template_name = "django/forms/widgets/text.html"
class NumberInput(Input):
input_type = "number"
template_name = "django/forms/widgets/number.html"
class EmailInput(Input):
input_type = "email"
template_name = "django/forms/widgets/email.html"
class URLInput(Input):
input_type = "url"
template_name = "django/forms/widgets/url.html"
class PasswordInput(Input):
input_type = "password"
template_name = "django/forms/widgets/password.html"
def __init__(self, attrs=None, render_value=False):
super().__init__(attrs)
self.render_value = render_value
def get_context(self, name, value, attrs):
if not self.render_value:
value = None
return super().get_context(name, value, attrs)
class HiddenInput(Input):
input_type = "hidden"
template_name = "django/forms/widgets/hidden.html"
class MultipleHiddenInput(HiddenInput):
"""
Handle <input type="hidden"> for fields that have a list
of values.
"""
template_name = "django/forms/widgets/multiple_hidden.html"
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
final_attrs = context["widget"]["attrs"]
id_ = context["widget"]["attrs"].get("id")
subwidgets = []
for index, value_ in enumerate(context["widget"]["value"]):
widget_attrs = final_attrs.copy()
if id_:
# An ID attribute was given. Add a numeric index as a suffix
# so that the inputs don't all have the same ID attribute.
widget_attrs["id"] = "%s_%s" % (id_, index)
widget = HiddenInput()
widget.is_required = self.is_required
subwidgets.append(widget.get_context(name, value_, widget_attrs)["widget"])
context["widget"]["subwidgets"] = subwidgets
return context
def value_from_datadict(self, data, files, name):
try:
getter = data.getlist
except AttributeError:
getter = data.get
return getter(name)
def format_value(self, value):
return [] if value is None else value
class FileInput(Input):
input_type = "file"
needs_multipart_form = True
template_name = "django/forms/widgets/file.html"
def format_value(self, value):
"""File input never renders a value."""
return
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name)
def value_omitted_from_data(self, data, files, name):
return name not in files
def use_required_attribute(self, initial):
return super().use_required_attribute(initial) and not initial
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
clear_checkbox_label = _("Clear")
initial_text = _("Currently")
input_text = _("Change")
template_name = "django/forms/widgets/clearable_file_input.html"
def clear_checkbox_name(self, name):
"""
Given the name of the file input, return the name of the clear checkbox
input.
"""
return name + "-clear"
def clear_checkbox_id(self, name):
"""
Given the name of the clear checkbox input, return the HTML id for it.
"""
return name + "_id"
def is_initial(self, value):
"""
Return whether value is considered to be initial value.
"""
return bool(value and getattr(value, "url", False))
def format_value(self, value):
"""
Return the file object if it has a defined url attribute.
"""
if self.is_initial(value):
return value
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
context["widget"].update(
{
"checkbox_name": checkbox_name,
"checkbox_id": checkbox_id,
"is_initial": self.is_initial(value),
"input_text": self.input_text,
"initial_text": self.initial_text,
"clear_checkbox_label": self.clear_checkbox_label,
}
)
context["widget"]["attrs"].setdefault("disabled", False)
return context
def value_from_datadict(self, data, files, name):
upload = super().value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)
):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
# False signals to clear any existing value, as opposed to just None
return False
return upload
def value_omitted_from_data(self, data, files, name):
return (
super().value_omitted_from_data(data, files, name)
and self.clear_checkbox_name(name) not in data
)
class Textarea(Widget):
template_name = "django/forms/widgets/textarea.html"
def __init__(self, attrs=None):
# Use slightly better defaults than HTML's 20x2 box
default_attrs = {"cols": "40", "rows": "10"}
if attrs:
default_attrs.update(attrs)
super().__init__(default_attrs)
class DateTimeBaseInput(TextInput):
format_key = ""
supports_microseconds = False
def __init__(self, attrs=None, format=None):
super().__init__(attrs)
self.format = format or None
def format_value(self, value):
return formats.localize_input(
value, self.format or formats.get_format(self.format_key)[0]
)
class DateInput(DateTimeBaseInput):
format_key = "DATE_INPUT_FORMATS"
template_name = "django/forms/widgets/date.html"
class DateTimeInput(DateTimeBaseInput):
format_key = "DATETIME_INPUT_FORMATS"
template_name = "django/forms/widgets/datetime.html"
class TimeInput(DateTimeBaseInput):
format_key = "TIME_INPUT_FORMATS"
template_name = "django/forms/widgets/time.html"
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
return not (v is False or v is None or v == "")
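# e.g. boolean_check("") -> False and boolean_check("0") -> True; pass
# CheckboxInput(check_test=...) a custom callable to change which values
# render as checked.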
class CheckboxInput(Input):
input_type = "checkbox"
template_name = "django/forms/widgets/checkbox.html"
def __init__(self, attrs=None, check_test=None):
super().__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = boolean_check if check_test is None else check_test
def format_value(self, value):
"""Only return the 'value' attribute if value isn't empty."""
if value is True or value is False or value is None or value == "":
return
return str(value)
def get_context(self, name, value, attrs):
if self.check_test(value):
attrs = {**(attrs or {}), "checked": True}
return super().get_context(name, value, attrs)
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
value = data.get(name)
# Translate true and false strings to boolean values.
values = {"true": True, "false": False}
if isinstance(value, str):
value = values.get(value.lower(), value)
return bool(value)
def value_omitted_from_data(self, data, files, name):
# HTML checkboxes don't appear in POST data if not checked, so it's
# never known if the value is actually omitted.
return False
class ChoiceWidget(Widget):
allow_multiple_selected = False
input_type = None
template_name = None
option_template_name = None
add_id_index = True
checked_attribute = {"checked": True}
option_inherits_attrs = True
def __init__(self, attrs=None, choices=()):
super().__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
obj.choices = copy.copy(self.choices)
memo[id(self)] = obj
return obj
def subwidgets(self, name, value, attrs=None):
"""
Yield all "subwidgets" of this widget. Used to enable iterating
options from a BoundField for choice widgets.
"""
value = self.format_value(value)
yield from self.options(name, value, attrs)
def options(self, name, value, attrs=None):
"""Yield a flat list of options for this widget."""
for group in self.optgroups(name, value, attrs):
yield from group[1]
def optgroups(self, name, value, attrs=None):
"""Return a list of optgroups for this widget."""
groups = []
has_selected = False
for index, (option_value, option_label) in enumerate(self.choices):
if option_value is None:
option_value = ""
subgroup = []
if isinstance(option_label, (list, tuple)):
group_name = option_value
subindex = 0
choices = option_label
else:
group_name = None
subindex = None
choices = [(option_value, option_label)]
groups.append((group_name, subgroup, index))
for subvalue, sublabel in choices:
selected = (not has_selected or self.allow_multiple_selected) and str(
subvalue
) in value
has_selected |= selected
subgroup.append(
self.create_option(
name,
subvalue,
sublabel,
selected,
index,
subindex=subindex,
attrs=attrs,
)
)
if subindex is not None:
subindex += 1
return groups
def create_option(
self, name, value, label, selected, index, subindex=None, attrs=None
):
index = str(index) if subindex is None else "%s_%s" % (index, subindex)
option_attrs = (
self.build_attrs(self.attrs, attrs) if self.option_inherits_attrs else {}
)
if selected:
option_attrs.update(self.checked_attribute)
if "id" in option_attrs:
option_attrs["id"] = self.id_for_label(option_attrs["id"], index)
return {
"name": name,
"value": value,
"label": label,
"selected": selected,
"index": index,
"attrs": option_attrs,
"type": self.input_type,
"template_name": self.option_template_name,
"wrap_label": True,
}
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context["widget"]["optgroups"] = self.optgroups(
name, context["widget"]["value"], attrs
)
return context
def id_for_label(self, id_, index="0"):
"""
Use an incremented id for each option where the main widget
references the zero index.
"""
if id_ and self.add_id_index:
id_ = "%s_%s" % (id_, index)
return id_
def value_from_datadict(self, data, files, name):
getter = data.get
if self.allow_multiple_selected:
try:
getter = data.getlist
except AttributeError:
pass
return getter(name)
def format_value(self, value):
"""Return selected values as a list."""
if value is None and self.allow_multiple_selected:
return []
if not isinstance(value, (tuple, list)):
value = [value]
return [str(v) if v is not None else "" for v in value]
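# A minimal usage sketch (illustrative only, not executed at import time):
# grouped choices yield one optgroup per (group label, sub-choices) pair.
# Assuming a concrete subclass such as Select below:
#
#   >>> w = Select(choices=[("", "---"), ("Audio", [("cd", "CD"), ("lp", "LP")])])
#   >>> [(g, [o["value"] for o in opts]) for g, opts, _ in w.optgroups("m", [""])]
#   [(None, ['']), ('Audio', ['cd', 'lp'])]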
class Select(ChoiceWidget):
input_type = "select"
template_name = "django/forms/widgets/select.html"
option_template_name = "django/forms/widgets/select_option.html"
add_id_index = False
checked_attribute = {"selected": True}
option_inherits_attrs = False
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.allow_multiple_selected:
context["widget"]["attrs"]["multiple"] = True
return context
@staticmethod
def _choice_has_empty_value(choice):
"""Return True if the choice's value is empty string or None."""
value, _ = choice
return value is None or value == ""
def use_required_attribute(self, initial):
"""
Don't render 'required' if the first <option> has a value, as that's
invalid HTML.
"""
use_required_attribute = super().use_required_attribute(initial)
# 'required' is always okay for <select multiple>.
if self.allow_multiple_selected:
return use_required_attribute
first_choice = next(iter(self.choices), None)
return (
use_required_attribute
and first_choice is not None
and self._choice_has_empty_value(first_choice)
)
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = (
("unknown", _("Unknown")),
("true", _("Yes")),
("false", _("No")),
)
super().__init__(attrs, choices)
def format_value(self, value):
try:
return {
True: "true",
False: "false",
"true": "true",
"false": "false",
# For backwards compatibility with Django < 2.2.
"2": "true",
"3": "false",
}[value]
except KeyError:
return "unknown"
def value_from_datadict(self, data, files, name):
value = data.get(name)
return {
True: True,
"True": True,
"False": False,
False: False,
"true": True,
"false": False,
# For backwards compatibility with Django < 2.2.
"2": True,
"3": False,
}.get(value)
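# Usage sketch (illustrative): submitted strings map onto True/False, and
# anything unrecognized (including a missing key) maps onto None ("unknown"):
#
#   >>> w = NullBooleanSelect()
#   >>> w.value_from_datadict({"flag": "true"}, {}, "flag")
#   True
#   >>> w.value_from_datadict({}, {}, "flag") is None
#   True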
class SelectMultiple(Select):
allow_multiple_selected = True
def value_from_datadict(self, data, files, name):
try:
getter = data.getlist
except AttributeError:
getter = data.get
return getter(name)
def value_omitted_from_data(self, data, files, name):
# An unselected <select multiple> doesn't appear in POST data, so it's
# never known if the value is actually omitted.
return False
class RadioSelect(ChoiceWidget):
input_type = "radio"
template_name = "django/forms/widgets/radio.html"
option_template_name = "django/forms/widgets/radio_option.html"
use_fieldset = True
def id_for_label(self, id_, index=None):
"""
        Don't include for="field_0" in <label> to improve accessibility when
        using a screen reader; in addition, clicking such a label would
        toggle the first input.
"""
if index is None:
return ""
return super().id_for_label(id_, index)
class CheckboxSelectMultiple(RadioSelect):
allow_multiple_selected = True
input_type = "checkbox"
template_name = "django/forms/widgets/checkbox_select.html"
option_template_name = "django/forms/widgets/checkbox_option.html"
def use_required_attribute(self, initial):
# Don't use the 'required' attribute because browser validation would
# require all checkboxes to be checked instead of at least one.
return False
def value_omitted_from_data(self, data, files, name):
# HTML checkboxes don't appear in POST data if not checked, so it's
# never known if the value is actually omitted.
return False
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
In addition to the values added by Widget.get_context(), this widget
adds a list of subwidgets to the context as widget['subwidgets'].
These can be looped over and rendered like normal widgets.
You'll probably want to use this class with MultiValueField.
"""
template_name = "django/forms/widgets/multiwidget.html"
use_fieldset = True
def __init__(self, widgets, attrs=None):
if isinstance(widgets, dict):
self.widgets_names = [("_%s" % name) if name else "" for name in widgets]
widgets = widgets.values()
else:
self.widgets_names = ["_%s" % i for i in range(len(widgets))]
self.widgets = [w() if isinstance(w, type) else w for w in widgets]
super().__init__(attrs)
@property
def is_hidden(self):
return all(w.is_hidden for w in self.widgets)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
# value is a list/tuple of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, (list, tuple)):
value = self.decompress(value)
final_attrs = context["widget"]["attrs"]
input_type = final_attrs.pop("type", None)
id_ = final_attrs.get("id")
subwidgets = []
for i, (widget_name, widget) in enumerate(
zip(self.widgets_names, self.widgets)
):
if input_type is not None:
widget.input_type = input_type
widget_name = name + widget_name
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
widget_attrs = final_attrs.copy()
widget_attrs["id"] = "%s_%s" % (id_, i)
else:
widget_attrs = final_attrs
subwidgets.append(
widget.get_context(widget_name, widget_value, widget_attrs)["widget"]
)
context["widget"]["subwidgets"] = subwidgets
return context
def id_for_label(self, id_):
return ""
def value_from_datadict(self, data, files, name):
return [
widget.value_from_datadict(data, files, name + widget_name)
for widget_name, widget in zip(self.widgets_names, self.widgets)
]
def value_omitted_from_data(self, data, files, name):
return all(
widget.value_omitted_from_data(data, files, name + widget_name)
for widget_name, widget in zip(self.widgets_names, self.widgets)
)
def decompress(self, value):
"""
Return a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError("Subclasses must implement this method.")
def _get_media(self):
"""
Media for a multiwidget is the combination of all media of the
subwidgets.
"""
media = Media()
for w in self.widgets:
media += w.media
return media
media = property(_get_media)
def __deepcopy__(self, memo):
obj = super().__deepcopy__(memo)
obj.widgets = copy.deepcopy(self.widgets)
return obj
@property
def needs_multipart_form(self):
return any(w.needs_multipart_form for w in self.widgets)
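# A minimal custom MultiWidget sketch (hypothetical names; a subclass only
# needs to provide the subwidgets and a matching decompress()):
#
#   class PhoneWidget(MultiWidget):
#       def __init__(self, attrs=None):
#           super().__init__([TextInput(), TextInput()], attrs)
#
#       def decompress(self, value):
#           # Split a compressed "prefix-number" string into sub-values;
#           # an empty value decompresses to one None per subwidget.
#           return value.split("-", 1) if value else [None, None]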
class SplitDateTimeWidget(MultiWidget):
"""
A widget that splits datetime input into two <input type="text"> boxes.
"""
supports_microseconds = False
template_name = "django/forms/widgets/splitdatetime.html"
def __init__(
self,
attrs=None,
date_format=None,
time_format=None,
date_attrs=None,
time_attrs=None,
):
widgets = (
DateInput(
attrs=attrs if date_attrs is None else date_attrs,
format=date_format,
),
TimeInput(
attrs=attrs if time_attrs is None else time_attrs,
format=time_format,
),
)
super().__init__(widgets)
def decompress(self, value):
if value:
value = to_current_timezone(value)
return [value.date(), value.time()]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A widget that splits datetime input into two <input type="hidden"> inputs.
"""
template_name = "django/forms/widgets/splithiddendatetime.html"
def __init__(
self,
attrs=None,
date_format=None,
time_format=None,
date_attrs=None,
time_attrs=None,
):
super().__init__(attrs, date_format, time_format, date_attrs, time_attrs)
for widget in self.widgets:
widget.input_type = "hidden"
class SelectDateWidget(Widget):
"""
A widget that splits date input into three <select> boxes.
This also serves as an example of a Widget that has more than one HTML
element and hence implements value_from_datadict.
"""
none_value = ("", "---")
month_field = "%s_month"
day_field = "%s_day"
year_field = "%s_year"
template_name = "django/forms/widgets/select_date.html"
input_type = "select"
select_widget = Select
date_re = _lazy_re_compile(r"(\d{4}|0)-(\d\d?)-(\d\d?)$")
use_fieldset = True
def __init__(self, attrs=None, years=None, months=None, empty_label=None):
self.attrs = attrs or {}
# Optional list or tuple of years to use in the "year" select box.
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year + 10)
# Optional dict of months to use in the "month" select box.
if months:
self.months = months
else:
self.months = MONTHS
# Optional string, list, or tuple to use as empty_label.
if isinstance(empty_label, (list, tuple)):
            if len(empty_label) != 3:
raise ValueError("empty_label list/tuple must have 3 elements.")
self.year_none_value = ("", empty_label[0])
self.month_none_value = ("", empty_label[1])
self.day_none_value = ("", empty_label[2])
else:
if empty_label is not None:
self.none_value = ("", empty_label)
self.year_none_value = self.none_value
self.month_none_value = self.none_value
self.day_none_value = self.none_value
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
date_context = {}
year_choices = [(i, str(i)) for i in self.years]
if not self.is_required:
year_choices.insert(0, self.year_none_value)
year_name = self.year_field % name
date_context["year"] = self.select_widget(
attrs, choices=year_choices
).get_context(
name=year_name,
value=context["widget"]["value"]["year"],
attrs={**context["widget"]["attrs"], "id": "id_%s" % year_name},
)
month_choices = list(self.months.items())
if not self.is_required:
month_choices.insert(0, self.month_none_value)
month_name = self.month_field % name
date_context["month"] = self.select_widget(
attrs, choices=month_choices
).get_context(
name=month_name,
value=context["widget"]["value"]["month"],
attrs={**context["widget"]["attrs"], "id": "id_%s" % month_name},
)
day_choices = [(i, i) for i in range(1, 32)]
if not self.is_required:
day_choices.insert(0, self.day_none_value)
day_name = self.day_field % name
date_context["day"] = self.select_widget(
attrs,
choices=day_choices,
).get_context(
name=day_name,
value=context["widget"]["value"]["day"],
attrs={**context["widget"]["attrs"], "id": "id_%s" % day_name},
)
subwidgets = []
for field in self._parse_date_fmt():
subwidgets.append(date_context[field]["widget"])
context["widget"]["subwidgets"] = subwidgets
return context
def format_value(self, value):
"""
Return a dict containing the year, month, and day of the current value.
Use dict instead of a datetime to allow invalid dates such as February
31 to display correctly.
"""
year, month, day = None, None, None
if isinstance(value, (datetime.date, datetime.datetime)):
year, month, day = value.year, value.month, value.day
elif isinstance(value, str):
match = self.date_re.match(value)
if match:
# Convert any zeros in the date to empty strings to match the
# empty option value.
year, month, day = [int(val) or "" for val in match.groups()]
else:
input_format = get_format("DATE_INPUT_FORMATS")[0]
try:
d = datetime.datetime.strptime(value, input_format)
except ValueError:
pass
else:
year, month, day = d.year, d.month, d.day
return {"year": year, "month": month, "day": day}
@staticmethod
def _parse_date_fmt():
fmt = get_format("DATE_FORMAT")
escaped = False
for char in fmt:
if escaped:
escaped = False
elif char == "\\":
escaped = True
elif char in "Yy":
yield "year"
elif char in "bEFMmNn":
yield "month"
elif char in "dj":
yield "day"
def id_for_label(self, id_):
for first_select in self._parse_date_fmt():
return "%s_%s" % (id_, first_select)
return "%s_month" % id_
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
d = data.get(self.day_field % name)
if y == m == d == "":
return None
if y is not None and m is not None and d is not None:
input_format = get_format("DATE_INPUT_FORMATS")[0]
input_format = formats.sanitize_strftime_format(input_format)
try:
date_value = datetime.date(int(y), int(m), int(d))
except ValueError:
# Return pseudo-ISO dates with zeros for any unselected values,
# e.g. '2017-0-23'.
return "%s-%s-%s" % (y or 0, m or 0, d or 0)
return date_value.strftime(input_format)
return data.get(name)
def value_omitted_from_data(self, data, files, name):
return not any(
("{}_{}".format(name, interval) in data)
for interval in ("year", "month", "day")
)
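# Usage sketch (illustrative; a valid selection is reformatted with the
# locale's first DATE_INPUT_FORMATS entry, so settings must be configured):
#
#   >>> w = SelectDateWidget()
#   >>> w.value_from_datadict(
#   ...     {"d_year": "2021", "d_month": "2", "d_day": "31"}, {}, "d"
#   ... )
#   '2021-2-31'  # invalid date, so the pseudo-ISO fallback is returned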
|
4589efd2d06471a67af47c9d3a3591940198bacdf0d1469a790c782ee24c2ffa | import json
from collections import UserList
from django.conf import settings
from django.core.exceptions import ValidationError
from django.forms.renderers import get_default_renderer
from django.utils import timezone
from django.utils.html import escape, format_html_join
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
def pretty_name(name):
"""Convert 'first_name' to 'First name'."""
if not name:
return ""
return name.replace("_", " ").capitalize()
def flatatt(attrs):
"""
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. In the case of a boolean value, the key will appear
without a value. It is assumed that the keys do not need to be
XML-escaped. If the passed dictionary is empty, then return an empty
string.
The result is passed through 'mark_safe' (by way of 'format_html_join').
"""
key_value_attrs = []
boolean_attrs = []
for attr, value in attrs.items():
if isinstance(value, bool):
if value:
boolean_attrs.append((attr,))
elif value is not None:
key_value_attrs.append((attr, value))
return format_html_join("", ' {}="{}"', sorted(key_value_attrs)) + format_html_join(
"", " {}", sorted(boolean_attrs)
)
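# Usage sketch (illustrative): boolean True renders as a bare attribute and
# boolean False is dropped entirely:
#
#   >>> flatatt({"type": "text", "required": True, "disabled": False})
#   ' type="text" required'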
class RenderableMixin:
def get_context(self):
raise NotImplementedError(
"Subclasses of RenderableMixin must provide a get_context() method."
)
def render(self, template_name=None, context=None, renderer=None):
renderer = renderer or self.renderer
template = template_name or self.template_name
context = context or self.get_context()
return mark_safe(renderer.render(template, context))
__str__ = render
__html__ = render
class RenderableFormMixin(RenderableMixin):
def as_p(self):
"""Render as <p> elements."""
return self.render(self.template_name_p)
def as_table(self):
"""Render as <tr> elements excluding the surrounding <table> tag."""
return self.render(self.template_name_table)
def as_ul(self):
"""Render as <li> elements excluding the surrounding <ul> tag."""
return self.render(self.template_name_ul)
def as_div(self):
"""Render as <div> elements."""
return self.render(self.template_name_div)
class RenderableErrorMixin(RenderableMixin):
def as_json(self, escape_html=False):
return json.dumps(self.get_json_data(escape_html))
def as_text(self):
return self.render(self.template_name_text)
def as_ul(self):
return self.render(self.template_name_ul)
class ErrorDict(dict, RenderableErrorMixin):
"""
A collection of errors that knows how to display itself in various formats.
The dictionary keys are the field names, and the values are the errors.
"""
template_name = "django/forms/errors/dict/default.html"
template_name_text = "django/forms/errors/dict/text.txt"
template_name_ul = "django/forms/errors/dict/ul.html"
def __init__(self, *args, renderer=None, **kwargs):
super().__init__(*args, **kwargs)
self.renderer = renderer or get_default_renderer()
def as_data(self):
return {f: e.as_data() for f, e in self.items()}
def get_json_data(self, escape_html=False):
return {f: e.get_json_data(escape_html) for f, e in self.items()}
def get_context(self):
return {
"errors": self.items(),
"error_class": "errorlist",
}
class ErrorList(UserList, list, RenderableErrorMixin):
"""
A collection of errors that knows how to display itself in various formats.
"""
template_name = "django/forms/errors/list/default.html"
template_name_text = "django/forms/errors/list/text.txt"
template_name_ul = "django/forms/errors/list/ul.html"
def __init__(self, initlist=None, error_class=None, renderer=None):
super().__init__(initlist)
if error_class is None:
self.error_class = "errorlist"
else:
self.error_class = "errorlist {}".format(error_class)
self.renderer = renderer or get_default_renderer()
def as_data(self):
return ValidationError(self.data).error_list
def copy(self):
copy = super().copy()
copy.error_class = self.error_class
return copy
def get_json_data(self, escape_html=False):
errors = []
for error in self.as_data():
message = next(iter(error))
errors.append(
{
"message": escape(message) if escape_html else message,
"code": error.code or "",
}
)
return errors
def get_context(self):
return {
"errors": self,
"error_class": self.error_class,
}
def __repr__(self):
return repr(list(self))
def __contains__(self, item):
return item in list(self)
def __eq__(self, other):
return list(self) == other
def __getitem__(self, i):
error = self.data[i]
if isinstance(error, ValidationError):
return next(iter(error))
return error
def __reduce_ex__(self, *args, **kwargs):
# The `list` reduce function returns an iterator as the fourth element
# that is normally used for repopulating. Since we only inherit from
# `list` for `isinstance` backward compatibility (Refs #17413) we
# nullify this iterator as it would otherwise result in duplicate
# entries. (Refs #23594)
info = super(UserList, self).__reduce_ex__(*args, **kwargs)
return info[:3] + (None, None)
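# Usage sketch (illustrative; requires configured settings so the default
# renderer can be resolved):
#
#   >>> errors = ErrorList([ValidationError("Too short.", code="min_length")])
#   >>> errors.get_json_data()
#   [{'message': 'Too short.', 'code': 'min_length'}]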
# Utilities for time zone support in DateTimeField et al.
def from_current_timezone(value):
"""
When time zone support is enabled, convert naive datetimes
entered in the current time zone to aware datetimes.
"""
if settings.USE_TZ and value is not None and timezone.is_naive(value):
current_timezone = timezone.get_current_timezone()
try:
if timezone._datetime_ambiguous_or_imaginary(value, current_timezone):
raise ValueError("Ambiguous or non-existent time.")
return timezone.make_aware(value, current_timezone)
except Exception as exc:
raise ValidationError(
_(
"%(datetime)s couldn’t be interpreted "
"in time zone %(current_timezone)s; it "
"may be ambiguous or it may not exist."
),
code="ambiguous_timezone",
params={"datetime": value, "current_timezone": current_timezone},
) from exc
return value
def to_current_timezone(value):
"""
When time zone support is enabled, convert aware datetimes
to naive datetimes in the current time zone for display.
"""
if settings.USE_TZ and value is not None and timezone.is_aware(value):
return timezone.make_naive(value)
return value
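# Round-trip sketch (illustrative; requires configured settings with
# USE_TZ=True and an `import datetime` in the caller):
#
#   >>> aware = timezone.make_aware(datetime.datetime(2023, 5, 1, 12, 0))
#   >>> naive = to_current_timezone(aware)
#   >>> from_current_timezone(naive) == aware
#   True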
|
c3e203d7c462fa2e95c51c6058c9692ee59e306b039fa25d84b2244b184e4446 | import functools
import warnings
from pathlib import Path
from django.conf import settings
from django.template.backends.django import DjangoTemplates
from django.template.loader import get_template
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
@functools.lru_cache
def get_default_renderer():
renderer_class = import_string(settings.FORM_RENDERER)
return renderer_class()
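# Note: because of the functools.lru_cache above, the first renderer
# instantiated is reused for the lifetime of the process. Changing
# settings.FORM_RENDERER afterwards (e.g. in tests) has no effect unless the
# cache is cleared with the standard lru_cache API:
#
#   >>> get_default_renderer.cache_clear()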
class BaseRenderer:
form_template_name = "django/forms/div.html"
formset_template_name = "django/forms/formsets/div.html"
def get_template(self, template_name):
raise NotImplementedError("subclasses must implement get_template()")
def render(self, template_name, context, request=None):
template = self.get_template(template_name)
return template.render(context, request=request).strip()
class EngineMixin:
def get_template(self, template_name):
return self.engine.get_template(template_name)
@cached_property
def engine(self):
return self.backend(
{
"APP_DIRS": True,
"DIRS": [Path(__file__).parent / self.backend.app_dirname],
"NAME": "djangoforms",
"OPTIONS": {},
}
)
class DjangoTemplates(EngineMixin, BaseRenderer):
"""
Load Django templates from the built-in widget templates in
django/forms/templates and from apps' 'templates' directory.
"""
backend = DjangoTemplates
class Jinja2(EngineMixin, BaseRenderer):
"""
Load Jinja2 templates from the built-in widget templates in
django/forms/jinja2 and from apps' 'jinja2' directory.
"""
@cached_property
def backend(self):
from django.template.backends.jinja2 import Jinja2
return Jinja2
# RemovedInDjango60Warning.
class DjangoDivFormRenderer(DjangoTemplates):
"""
Load Django templates from django/forms/templates and from apps'
'templates' directory and use the 'div.html' template to render forms and
formsets.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"The DjangoDivFormRenderer transitional form renderer is deprecated. Use "
"DjangoTemplates instead.",
RemovedInDjango60Warning,
)
        super().__init__(*args, **kwargs)
# RemovedInDjango60Warning.
class Jinja2DivFormRenderer(Jinja2):
"""
    Load Jinja2 templates from the built-in widget templates in
    django/forms/jinja2 and from apps' 'jinja2' directory and use the
    'div.html' template to render forms and formsets.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"The Jinja2DivFormRenderer transitional form renderer is deprecated. Use "
"Jinja2 instead.",
RemovedInDjango60Warning,
)
        super().__init__(*args, **kwargs)
class TemplatesSetting(BaseRenderer):
"""
Load templates using template.loader.get_template() which is configured
based on settings.TEMPLATES.
"""
def get_template(self, template_name):
return get_template(template_name)
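# Configuration sketch (illustrative): to have form and widget templates
# resolved through settings.TEMPLATES instead of the bundled ones, point the
# FORM_RENDERER setting at this class:
#
#   # settings.py
#   FORM_RENDERER = "django.forms.renderers.TemplatesSetting"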
|
f983186bb0d201d824ddd53aa895099121e9b3201c909911fae190d4bec5c689 | """
Form classes
"""
import copy
import datetime
from django.core.exceptions import NON_FIELD_ERRORS, ValidationError
from django.forms.fields import Field, FileField
from django.forms.utils import ErrorDict, ErrorList, RenderableFormMixin
from django.forms.widgets import Media, MediaDefiningClass
from django.utils.datastructures import MultiValueDict
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from .renderers import get_default_renderer
__all__ = ("BaseForm", "Form")
class DeclarativeFieldsMetaclass(MediaDefiningClass):
"""Collect Fields declared on the base classes."""
def __new__(mcs, name, bases, attrs):
# Collect fields from current class and remove them from attrs.
attrs["declared_fields"] = {
key: attrs.pop(key)
for key, value in list(attrs.items())
if isinstance(value, Field)
}
new_class = super().__new__(mcs, name, bases, attrs)
# Walk through the MRO.
declared_fields = {}
for base in reversed(new_class.__mro__):
# Collect fields from base class.
if hasattr(base, "declared_fields"):
declared_fields.update(base.declared_fields)
# Field shadowing.
for attr, value in base.__dict__.items():
if value is None and attr in declared_fields:
declared_fields.pop(attr)
new_class.base_fields = declared_fields
new_class.declared_fields = declared_fields
return new_class
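# Field-shadowing sketch (hypothetical form names): setting an inherited
# field's attribute to None removes it from the collected declared_fields:
#
#   class LoginForm(Form):
#       username = CharField()
#       password = CharField()
#
#   class TokenLoginForm(LoginForm):
#       password = None  # removes the inherited "password" field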
class BaseForm(RenderableFormMixin):
"""
The main implementation of all the Form logic. Note that this class is
    different from Form. See the comments by the Form class for more info. Any
improvements to the form API should be made to this class, not to the Form
class.
"""
default_renderer = None
field_order = None
prefix = None
use_required_attribute = True
template_name_div = "django/forms/div.html"
template_name_p = "django/forms/p.html"
template_name_table = "django/forms/table.html"
template_name_ul = "django/forms/ul.html"
template_name_label = "django/forms/label.html"
def __init__(
self,
data=None,
files=None,
auto_id="id_%s",
prefix=None,
initial=None,
error_class=ErrorList,
label_suffix=None,
empty_permitted=False,
field_order=None,
use_required_attribute=None,
renderer=None,
):
self.is_bound = data is not None or files is not None
self.data = MultiValueDict() if data is None else data
self.files = MultiValueDict() if files is None else files
self.auto_id = auto_id
if prefix is not None:
self.prefix = prefix
self.initial = initial or {}
self.error_class = error_class
# Translators: This is the default suffix added to form field labels
self.label_suffix = label_suffix if label_suffix is not None else _(":")
self.empty_permitted = empty_permitted
self._errors = None # Stores the errors after clean() has been called.
# The base_fields class attribute is the *class-wide* definition of
# fields. Because a particular *instance* of the class might want to
# alter self.fields, we create self.fields here by copying base_fields.
# Instances should always modify self.fields; they should not modify
# self.base_fields.
self.fields = copy.deepcopy(self.base_fields)
self._bound_fields_cache = {}
self.order_fields(self.field_order if field_order is None else field_order)
if use_required_attribute is not None:
self.use_required_attribute = use_required_attribute
if self.empty_permitted and self.use_required_attribute:
raise ValueError(
"The empty_permitted and use_required_attribute arguments may "
"not both be True."
)
# Initialize form renderer. Use a global default if not specified
# either as an argument or as self.default_renderer.
if renderer is None:
if self.default_renderer is None:
renderer = get_default_renderer()
else:
renderer = self.default_renderer
if isinstance(self.default_renderer, type):
renderer = renderer()
self.renderer = renderer
def order_fields(self, field_order):
"""
Rearrange the fields according to field_order.
field_order is a list of field names specifying the order. Append fields
not included in the list in the default order for backward compatibility
with subclasses not overriding field_order. If field_order is None,
keep all fields in the order defined in the class. Ignore unknown
fields in field_order to allow disabling fields in form subclasses
without redefining ordering.
"""
if field_order is None:
return
fields = {}
for key in field_order:
try:
fields[key] = self.fields.pop(key)
except KeyError: # ignore unknown fields
pass
fields.update(self.fields) # add remaining fields in original order
self.fields = fields
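    # Reordering sketch (illustrative, with a hypothetical ContactForm whose
    # declared order is name, email, body):
    #
    #   >>> form = ContactForm(field_order=["email", "missing", "name"])
    #   >>> list(form.fields)
    #   ['email', 'name', 'body']  # unknown names ignored, the rest appended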
def __repr__(self):
if self._errors is None:
is_valid = "Unknown"
else:
is_valid = self.is_bound and not self._errors
return "<%(cls)s bound=%(bound)s, valid=%(valid)s, fields=(%(fields)s)>" % {
"cls": self.__class__.__name__,
"bound": self.is_bound,
"valid": is_valid,
"fields": ";".join(self.fields),
}
def _bound_items(self):
"""Yield (name, bf) pairs, where bf is a BoundField object."""
for name in self.fields:
yield name, self[name]
def __iter__(self):
"""Yield the form's fields as BoundField objects."""
for name in self.fields:
yield self[name]
def __getitem__(self, name):
"""Return a BoundField with the given name."""
try:
field = self.fields[name]
except KeyError:
raise KeyError(
"Key '%s' not found in '%s'. Choices are: %s."
% (
name,
self.__class__.__name__,
", ".join(sorted(self.fields)),
)
)
if name not in self._bound_fields_cache:
self._bound_fields_cache[name] = field.get_bound_field(self, name)
return self._bound_fields_cache[name]
@property
def errors(self):
"""Return an ErrorDict for the data provided for the form."""
if self._errors is None:
self.full_clean()
return self._errors
def is_valid(self):
"""Return True if the form has no errors, or False otherwise."""
return self.is_bound and not self.errors
def add_prefix(self, field_name):
"""
Return the field name with a prefix appended, if this Form has a
prefix set.
Subclasses may wish to override.
"""
return "%s-%s" % (self.prefix, field_name) if self.prefix else field_name
def add_initial_prefix(self, field_name):
"""Add an 'initial' prefix for checking dynamic initial values."""
return "initial-%s" % self.add_prefix(field_name)
def _widget_data_value(self, widget, html_name):
# value_from_datadict() gets the data from the data dictionaries.
# Each widget type knows how to retrieve its own data, because some
# widgets split data over several HTML fields.
return widget.value_from_datadict(self.data, self.files, html_name)
@property
def template_name(self):
return self.renderer.form_template_name
def get_context(self):
fields = []
hidden_fields = []
top_errors = self.non_field_errors().copy()
for name, bf in self._bound_items():
bf_errors = self.error_class(bf.errors, renderer=self.renderer)
if bf.is_hidden:
if bf_errors:
top_errors += [
_("(Hidden field %(name)s) %(error)s")
% {"name": name, "error": str(e)}
for e in bf_errors
]
hidden_fields.append(bf)
else:
errors_str = str(bf_errors)
fields.append((bf, errors_str))
return {
"form": self,
"fields": fields,
"hidden_fields": hidden_fields,
"errors": top_errors,
}
def non_field_errors(self):
"""
Return an ErrorList of errors that aren't associated with a particular
field -- i.e., from Form.clean(). Return an empty ErrorList if there
are none.
"""
return self.errors.get(
NON_FIELD_ERRORS,
self.error_class(error_class="nonfield", renderer=self.renderer),
)
def add_error(self, field, error):
"""
Update the content of `self._errors`.
The `field` argument is the name of the field to which the errors
should be added. If it's None, treat the errors as NON_FIELD_ERRORS.
The `error` argument can be a single error, a list of errors, or a
dictionary that maps field names to lists of errors. An "error" can be
either a simple string or an instance of ValidationError with its
message attribute set and a "list or dictionary" can be an actual
`list` or `dict` or an instance of ValidationError with its
`error_list` or `error_dict` attribute set.
If `error` is a dictionary, the `field` argument *must* be None and
errors will be added to the fields that correspond to the keys of the
dictionary.
"""
if not isinstance(error, ValidationError):
# Normalize to ValidationError and let its constructor
# do the hard work of making sense of the input.
error = ValidationError(error)
if hasattr(error, "error_dict"):
if field is not None:
raise TypeError(
"The argument `field` must be `None` when the `error` "
"argument contains errors for multiple fields."
)
else:
error = error.error_dict
else:
error = {field or NON_FIELD_ERRORS: error.error_list}
for field, error_list in error.items():
if field not in self.errors:
if field != NON_FIELD_ERRORS and field not in self.fields:
raise ValueError(
"'%s' has no field named '%s'."
% (self.__class__.__name__, field)
)
if field == NON_FIELD_ERRORS:
self._errors[field] = self.error_class(
error_class="nonfield", renderer=self.renderer
)
else:
self._errors[field] = self.error_class(renderer=self.renderer)
self._errors[field].extend(error_list)
if field in self.cleaned_data:
del self.cleaned_data[field]
def has_error(self, field, code=None):
return field in self.errors and (
code is None
or any(error.code == code for error in self.errors.as_data()[field])
)
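    # Usage sketch (illustrative, inside a hypothetical clean() override):
    #
    #   >>> form.add_error(None, "Passwords do not match.")  # non-field error
    #   >>> form.add_error("email", ValidationError("Taken.", code="unique"))
    #   >>> form.has_error("email", code="unique")
    #   True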
def full_clean(self):
"""
Clean all of self.data and populate self._errors and self.cleaned_data.
"""
self._errors = ErrorDict()
if not self.is_bound: # Stop further processing.
return
self.cleaned_data = {}
# If the form is permitted to be empty, and none of the form data has
# changed from the initial data, short circuit any validation.
if self.empty_permitted and not self.has_changed():
return
self._clean_fields()
self._clean_form()
self._post_clean()
def _clean_fields(self):
for name, bf in self._bound_items():
field = bf.field
value = bf.initial if field.disabled else bf.data
try:
if isinstance(field, FileField):
value = field.clean(value, bf.initial)
else:
value = field.clean(value)
self.cleaned_data[name] = value
if hasattr(self, "clean_%s" % name):
value = getattr(self, "clean_%s" % name)()
self.cleaned_data[name] = value
except ValidationError as e:
self.add_error(name, e)
def _clean_form(self):
try:
cleaned_data = self.clean()
except ValidationError as e:
self.add_error(None, e)
else:
if cleaned_data is not None:
self.cleaned_data = cleaned_data
def _post_clean(self):
"""
An internal hook for performing additional cleaning after form cleaning
is complete. Used for model validation in model forms.
"""
pass
def clean(self):
"""
Hook for doing any extra form-wide cleaning after Field.clean() has been
called on every field. Any ValidationError raised by this method will
not be associated with a particular field; it will have a special-case
association with the field named '__all__'.
"""
return self.cleaned_data
def has_changed(self):
"""Return True if data differs from initial."""
return bool(self.changed_data)
@cached_property
def changed_data(self):
return [name for name, bf in self._bound_items() if bf._has_changed()]
@property
def media(self):
"""Return all media required to render the widgets on this form."""
media = Media()
for field in self.fields.values():
media += field.widget.media
return media
def is_multipart(self):
"""
Return True if the form needs to be multipart-encoded, i.e. it has
FileInput, or False otherwise.
"""
return any(field.widget.needs_multipart_form for field in self.fields.values())
def hidden_fields(self):
"""
Return a list of all the BoundField objects that are hidden fields.
Useful for manual form layout in templates.
"""
return [field for field in self if field.is_hidden]
def visible_fields(self):
"""
Return a list of BoundField objects that aren't hidden fields.
The opposite of the hidden_fields() method.
"""
return [field for field in self if not field.is_hidden]
def get_initial_for_field(self, field, field_name):
"""
Return initial data for field on form. Use initial data from the form
or the field, in that order. Evaluate callable values.
"""
value = self.initial.get(field_name, field.initial)
if callable(value):
value = value()
# If this is an auto-generated default date, nix the microseconds
# for standardized handling. See #22502.
if (
isinstance(value, (datetime.datetime, datetime.time))
and not field.widget.supports_microseconds
):
value = value.replace(microsecond=0)
return value
class Form(BaseForm, metaclass=DeclarativeFieldsMetaclass):
"A collection of Fields, plus their associated data."
# This is a separate class from BaseForm in order to abstract the way
# self.fields is specified. This class (Form) is the one that does the
# fancy metaclass stuff purely for the semantic sugar -- it allows one
# to define a form using declarative syntax.
# BaseForm itself has no way of designating self.fields.
|
c2423a2e5dd5a2708e4e6bdebb3f150606544fe82573f27b9ec8749439da2c9f | import ipaddress
import math
import re
from pathlib import Path
from urllib.parse import urlsplit, urlunsplit
from django.core.exceptions import ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.encoding import punycode
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
# These values, if given to validate(), will trigger the self.required check.
EMPTY_VALUES = (None, "", [], (), {})
@deconstructible
class RegexValidator:
regex = ""
message = _("Enter a valid value.")
code = "invalid"
inverse_match = False
flags = 0
def __init__(
self, regex=None, message=None, code=None, inverse_match=None, flags=None
):
if regex is not None:
self.regex = regex
if message is not None:
self.message = message
if code is not None:
self.code = code
if inverse_match is not None:
self.inverse_match = inverse_match
if flags is not None:
self.flags = flags
if self.flags and not isinstance(self.regex, str):
raise TypeError(
"If the flags are set, regex must be a regular expression string."
)
self.regex = _lazy_re_compile(self.regex, self.flags)
def __call__(self, value):
"""
Validate that the input contains (or does *not* contain, if
inverse_match is True) a match for the regular expression.
"""
regex_matches = self.regex.search(str(value))
invalid_input = regex_matches if self.inverse_match else not regex_matches
if invalid_input:
raise ValidationError(self.message, code=self.code, params={"value": value})
def __eq__(self, other):
return (
isinstance(other, RegexValidator)
and self.regex.pattern == other.regex.pattern
and self.regex.flags == other.regex.flags
and (self.message == other.message)
and (self.code == other.code)
and (self.inverse_match == other.inverse_match)
)
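# Usage sketch (illustrative): a validator instance is a callable that raises
# ValidationError on failure and returns None on success:
#
#   >>> hex_validator = RegexValidator(r"^[0-9a-f]+\Z", message="Hex only.")
#   >>> hex_validator("deadbeef")  # passes silently
#   >>> hex_validator("xyz")       # raises ValidationError("Hex only.")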
@deconstructible
class URLValidator(RegexValidator):
ul = "\u00a1-\uffff" # Unicode letters range (must not be a raw string).
# IP patterns
ipv4_re = (
r"(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)"
r"(?:\.(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)){3}"
)
ipv6_re = r"\[[0-9a-f:.]+\]" # (simple regex, validated later)
# Host patterns
hostname_re = (
r"[a-z" + ul + r"0-9](?:[a-z" + ul + r"0-9-]{0,61}[a-z" + ul + r"0-9])?"
)
# Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1
domain_re = r"(?:\.(?!-)[a-z" + ul + r"0-9-]{1,63}(?<!-))*"
tld_re = (
r"\." # dot
r"(?!-)" # can't start with a dash
r"(?:[a-z" + ul + "-]{2,63}" # domain label
r"|xn--[a-z0-9]{1,59})" # or punycode label
r"(?<!-)" # can't end with a dash
r"\.?" # may have a trailing dot
)
host_re = "(" + hostname_re + domain_re + tld_re + "|localhost)"
regex = _lazy_re_compile(
r"^(?:[a-z0-9.+-]*)://" # scheme is validated separately
r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?" # user:pass authentication
r"(?:" + ipv4_re + "|" + ipv6_re + "|" + host_re + ")"
r"(?::[0-9]{1,5})?" # port
r"(?:[/?#][^\s]*)?" # resource path
r"\Z",
re.IGNORECASE,
)
message = _("Enter a valid URL.")
schemes = ["http", "https", "ftp", "ftps"]
unsafe_chars = frozenset("\t\r\n")
def __init__(self, schemes=None, **kwargs):
super().__init__(**kwargs)
if schemes is not None:
self.schemes = schemes
def __call__(self, value):
if not isinstance(value, str):
raise ValidationError(self.message, code=self.code, params={"value": value})
if self.unsafe_chars.intersection(value):
raise ValidationError(self.message, code=self.code, params={"value": value})
# Check if the scheme is valid.
scheme = value.split("://")[0].lower()
if scheme not in self.schemes:
raise ValidationError(self.message, code=self.code, params={"value": value})
# Then check full URL
try:
splitted_url = urlsplit(value)
except ValueError:
raise ValidationError(self.message, code=self.code, params={"value": value})
try:
super().__call__(value)
except ValidationError as e:
# Trivial case failed. Try for possible IDN domain
if value:
scheme, netloc, path, query, fragment = splitted_url
try:
netloc = punycode(netloc) # IDN -> ACE
except UnicodeError: # invalid domain part
raise e
url = urlunsplit((scheme, netloc, path, query, fragment))
super().__call__(url)
else:
raise
else:
# Now verify IPv6 in the netloc part
host_match = re.search(r"^\[(.+)\](?::[0-9]{1,5})?$", splitted_url.netloc)
if host_match:
potential_ip = host_match[1]
try:
validate_ipv6_address(potential_ip)
except ValidationError:
raise ValidationError(
self.message, code=self.code, params={"value": value}
)
# The maximum length of a full host name is 253 characters per RFC 1034
# section 3.1. It's defined to be 255 bytes or less, but this includes
# one byte for the length of the name and one byte for the trailing dot
# that's used to indicate absolute names in DNS.
if splitted_url.hostname is None or len(splitted_url.hostname) > 253:
raise ValidationError(self.message, code=self.code, params={"value": value})
integer_validator = RegexValidator(
_lazy_re_compile(r"^-?\d+\Z"),
message=_("Enter a valid integer."),
code="invalid",
)
def validate_integer(value):
return integer_validator(value)
@deconstructible
class EmailValidator:
message = _("Enter a valid email address.")
code = "invalid"
user_regex = _lazy_re_compile(
# dot-atom
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
# quoted-string
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])'
r'*"\Z)',
re.IGNORECASE,
)
domain_regex = _lazy_re_compile(
# max length for domain name labels is 63 characters per RFC 1034
r"((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z",
re.IGNORECASE,
)
literal_regex = _lazy_re_compile(
# literal form, ipv4 or ipv6 address (SMTP 4.1.3)
r"\[([A-F0-9:.]+)\]\Z",
re.IGNORECASE,
)
domain_allowlist = ["localhost"]
def __init__(self, message=None, code=None, allowlist=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
if allowlist is not None:
self.domain_allowlist = allowlist
def __call__(self, value):
if not value or "@" not in value:
raise ValidationError(self.message, code=self.code, params={"value": value})
user_part, domain_part = value.rsplit("@", 1)
if not self.user_regex.match(user_part):
raise ValidationError(self.message, code=self.code, params={"value": value})
if domain_part not in self.domain_allowlist and not self.validate_domain_part(
domain_part
):
# Try for possible IDN domain-part
try:
domain_part = punycode(domain_part)
except UnicodeError:
pass
else:
if self.validate_domain_part(domain_part):
return
raise ValidationError(self.message, code=self.code, params={"value": value})
def validate_domain_part(self, domain_part):
if self.domain_regex.match(domain_part):
return True
literal_match = self.literal_regex.match(domain_part)
if literal_match:
ip_address = literal_match[1]
try:
validate_ipv46_address(ip_address)
return True
except ValidationError:
pass
return False
def __eq__(self, other):
return (
isinstance(other, EmailValidator)
and (self.domain_allowlist == other.domain_allowlist)
and (self.message == other.message)
and (self.code == other.code)
)
validate_email = EmailValidator()
slug_re = _lazy_re_compile(r"^[-a-zA-Z0-9_]+\Z")
validate_slug = RegexValidator(
slug_re,
# Translators: "letters" means latin letters: a-z and A-Z.
_("Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."),
"invalid",
)
slug_unicode_re = _lazy_re_compile(r"^[-\w]+\Z")
validate_unicode_slug = RegexValidator(
slug_unicode_re,
_(
"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
"hyphens."
),
"invalid",
)
def validate_ipv4_address(value):
try:
ipaddress.IPv4Address(value)
except ValueError:
raise ValidationError(
_("Enter a valid IPv4 address."), code="invalid", params={"value": value}
)
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
raise ValidationError(
_("Enter a valid IPv6 address."), code="invalid", params={"value": value}
)
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except ValidationError:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(
_("Enter a valid IPv4 or IPv6 address."),
code="invalid",
params={"value": value},
)
ip_address_validator_map = {
"both": ([validate_ipv46_address], _("Enter a valid IPv4 or IPv6 address.")),
"ipv4": ([validate_ipv4_address], _("Enter a valid IPv4 address.")),
"ipv6": ([validate_ipv6_address], _("Enter a valid IPv6 address.")),
}
def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
"""
if protocol != "both" and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'"
)
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError(
"The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map))
)
def int_list_validator(sep=",", message=None, code="invalid", allow_negative=False):
regexp = _lazy_re_compile(
r"^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z"
% {
"neg": "(-)?" if allow_negative else "",
"sep": re.escape(sep),
}
)
return RegexValidator(regexp, message=message, code=code)
validate_comma_separated_integer_list = int_list_validator(
message=_("Enter only digits separated by commas."),
)
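# Usage sketch (illustrative): the generated pattern allows no whitespace, so
# values must be digit groups strictly joined by the separator:
#
#   >>> validate_comma_separated_integer_list("1,2,3")  # passes
#   >>> validate_comma_separated_integer_list("1, 2")   # raises ValidationError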
@deconstructible
class BaseValidator:
message = _("Ensure this value is %(limit_value)s (it is %(show_value)s).")
code = "limit_value"
def __init__(self, limit_value, message=None):
self.limit_value = limit_value
if message:
self.message = message
def __call__(self, value):
cleaned = self.clean(value)
limit_value = (
self.limit_value() if callable(self.limit_value) else self.limit_value
)
params = {"limit_value": limit_value, "show_value": cleaned, "value": value}
if self.compare(cleaned, limit_value):
raise ValidationError(self.message, code=self.code, params=params)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.limit_value == other.limit_value
and self.message == other.message
and self.code == other.code
)
def compare(self, a, b):
return a is not b
def clean(self, x):
return x
@deconstructible
class MaxValueValidator(BaseValidator):
message = _("Ensure this value is less than or equal to %(limit_value)s.")
code = "max_value"
def compare(self, a, b):
return a > b
@deconstructible
class MinValueValidator(BaseValidator):
message = _("Ensure this value is greater than or equal to %(limit_value)s.")
code = "min_value"
def compare(self, a, b):
return a < b
@deconstructible
class StepValueValidator(BaseValidator):
message = _("Ensure this value is a multiple of step size %(limit_value)s.")
code = "step_size"
def compare(self, a, b):
return not math.isclose(math.remainder(a, b), 0, abs_tol=1e-9)
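# Worked example (illustrative): math.remainder(10, 3) == 1.0, which is not
# within 1e-9 of zero, so compare() returns True and validation fails:
#
#   >>> StepValueValidator(3)(9)   # 9 is a multiple of 3, passes
#   >>> StepValueValidator(3)(10)  # raises ValidationError (step_size)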
@deconstructible
class MinLengthValidator(BaseValidator):
message = ngettext_lazy(
"Ensure this value has at least %(limit_value)d character (it has "
"%(show_value)d).",
"Ensure this value has at least %(limit_value)d characters (it has "
"%(show_value)d).",
"limit_value",
)
code = "min_length"
def compare(self, a, b):
return a < b
def clean(self, x):
return len(x)
@deconstructible
class MaxLengthValidator(BaseValidator):
message = ngettext_lazy(
"Ensure this value has at most %(limit_value)d character (it has "
"%(show_value)d).",
"Ensure this value has at most %(limit_value)d characters (it has "
"%(show_value)d).",
"limit_value",
)
code = "max_length"
def compare(self, a, b):
return a > b
def clean(self, x):
return len(x)
@deconstructible
class DecimalValidator:
"""
Validate that the input does not exceed the maximum number of digits
expected, otherwise raise ValidationError.
"""
messages = {
"invalid": _("Enter a number."),
"max_digits": ngettext_lazy(
"Ensure that there are no more than %(max)s digit in total.",
"Ensure that there are no more than %(max)s digits in total.",
"max",
),
"max_decimal_places": ngettext_lazy(
"Ensure that there are no more than %(max)s decimal place.",
"Ensure that there are no more than %(max)s decimal places.",
"max",
),
"max_whole_digits": ngettext_lazy(
"Ensure that there are no more than %(max)s digit before the decimal "
"point.",
"Ensure that there are no more than %(max)s digits before the decimal "
"point.",
"max",
),
}
def __init__(self, max_digits, decimal_places):
self.max_digits = max_digits
self.decimal_places = decimal_places
def __call__(self, value):
digit_tuple, exponent = value.as_tuple()[1:]
if exponent in {"F", "n", "N"}:
raise ValidationError(
self.messages["invalid"], code="invalid", params={"value": value}
)
if exponent >= 0:
digits = len(digit_tuple)
if digit_tuple != (0,):
# A positive exponent adds that many trailing zeros.
digits += exponent
decimals = 0
else:
# If the absolute value of the negative exponent is larger than the
# number of digits, then it's the same as the number of digits,
# because it'll consume all of the digits in digit_tuple and then
# add abs(exponent) - len(digit_tuple) leading zeros after the
# decimal point.
if abs(exponent) > len(digit_tuple):
digits = decimals = abs(exponent)
else:
digits = len(digit_tuple)
decimals = abs(exponent)
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(
self.messages["max_digits"],
code="max_digits",
params={"max": self.max_digits, "value": value},
)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(
self.messages["max_decimal_places"],
code="max_decimal_places",
params={"max": self.decimal_places, "value": value},
)
if (
self.max_digits is not None
and self.decimal_places is not None
and whole_digits > (self.max_digits - self.decimal_places)
):
raise ValidationError(
self.messages["max_whole_digits"],
code="max_whole_digits",
params={"max": (self.max_digits - self.decimal_places), "value": value},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.max_digits == other.max_digits
and self.decimal_places == other.decimal_places
)
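# Worked example (illustrative): Decimal("1234.5").as_tuple() gives digits
# (1, 2, 3, 4, 5) and exponent -1, i.e. 5 digits total, 1 decimal place, and
# 4 whole digits, so max_digits=5 with decimal_places=2 fails on whole digits:
#
#   >>> from decimal import Decimal
#   >>> DecimalValidator(5, 2)(Decimal("123.45"))  # passes
#   >>> DecimalValidator(5, 2)(Decimal("1234.5"))  # raises max_whole_digits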
@deconstructible
class FileExtensionValidator:
message = _(
"File extension “%(extension)s” is not allowed. "
"Allowed extensions are: %(allowed_extensions)s."
)
code = "invalid_extension"
def __init__(self, allowed_extensions=None, message=None, code=None):
if allowed_extensions is not None:
allowed_extensions = [
allowed_extension.lower() for allowed_extension in allowed_extensions
]
self.allowed_extensions = allowed_extensions
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
extension = Path(value.name).suffix[1:].lower()
if (
self.allowed_extensions is not None
and extension not in self.allowed_extensions
):
raise ValidationError(
self.message,
code=self.code,
params={
"extension": extension,
"allowed_extensions": ", ".join(self.allowed_extensions),
"value": value,
},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.allowed_extensions == other.allowed_extensions
and self.message == other.message
and self.code == other.code
)
def get_available_image_extensions():
try:
from PIL import Image
except ImportError:
return []
else:
Image.init()
return [ext.lower()[1:] for ext in Image.EXTENSION]
def validate_image_file_extension(value):
return FileExtensionValidator(allowed_extensions=get_available_image_extensions())(
value
)
@deconstructible
class ProhibitNullCharactersValidator:
"""Validate that the string doesn't contain the null character."""
message = _("Null characters are not allowed.")
code = "null_characters_not_allowed"
def __init__(self, message=None, code=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
if "\x00" in str(value):
raise ValidationError(self.message, code=self.code, params={"value": value})
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.message == other.message
and self.code == other.code
)
|
35841f2986b3b3a598c67c1382842ddb094cf7cc512b849ca06918158e51a91a | import datetime
import io
import json
import mimetypes
import os
import re
import sys
import time
import warnings
from email.header import Header
from http.client import responses
from urllib.parse import urlparse
from asgiref.sync import async_to_sync, sync_to_async
from django.conf import settings
from django.core import signals, signing
from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.http.cookie import SimpleCookie
from django.utils import timezone
from django.utils.datastructures import CaseInsensitiveMapping
from django.utils.encoding import iri_to_uri
from django.utils.http import content_disposition_header, http_date
from django.utils.regex_helper import _lazy_re_compile
_charset_from_content_type_re = _lazy_re_compile(
r";\s*charset=(?P<charset>[^\s;]+)", re.I
)
class ResponseHeaders(CaseInsensitiveMapping):
def __init__(self, data):
"""
Populate the initial data using __setitem__ to ensure values are
correctly encoded.
"""
self._store = {}
if data:
for header, value in self._unpack_items(data):
self[header] = value
def _convert_to_charset(self, value, charset, mime_encode=False):
"""
Convert headers key/value to ascii/latin-1 native strings.
`charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and
`value` can't be represented in the given charset, apply MIME-encoding.
"""
try:
if isinstance(value, str):
# Ensure string is valid in given charset
value.encode(charset)
elif isinstance(value, bytes):
# Convert bytestring using given charset
value = value.decode(charset)
else:
value = str(value)
# Ensure string is valid in given charset.
value.encode(charset)
if "\n" in value or "\r" in value:
raise BadHeaderError(
f"Header values can't contain newlines (got {value!r})"
)
except UnicodeError as e:
# Encoding to a string of the specified charset failed, but we
# don't know what type that value was, or if it contains newlines,
# which we may need to check for before sending it to be
# encoded for multiple character sets.
if (isinstance(value, bytes) and (b"\n" in value or b"\r" in value)) or (
isinstance(value, str) and ("\n" in value or "\r" in value)
):
raise BadHeaderError(
f"Header values can't contain newlines (got {value!r})"
) from e
if mime_encode:
value = Header(value, "utf-8", maxlinelen=sys.maxsize).encode()
else:
e.reason += ", HTTP response headers must be in %s format" % charset
raise
return value
def __delitem__(self, key):
self.pop(key)
def __setitem__(self, key, value):
key = self._convert_to_charset(key, "ascii")
value = self._convert_to_charset(value, "latin-1", mime_encode=True)
self._store[key.lower()] = (key, value)
def pop(self, key, default=None):
return self._store.pop(key.lower(), default)
def setdefault(self, key, value):
if key not in self:
self[key] = value
class BadHeaderError(ValueError):
pass
class HttpResponseBase:
"""
An HTTP response base class with dictionary-accessed headers.
This class doesn't handle content. It should not be used directly.
Use the HttpResponse and StreamingHttpResponse subclasses instead.
"""
status_code = 200
def __init__(
self, content_type=None, status=None, reason=None, charset=None, headers=None
):
self.headers = ResponseHeaders(headers)
self._charset = charset
if "Content-Type" not in self.headers:
if content_type is None:
content_type = f"text/html; charset={self.charset}"
self.headers["Content-Type"] = content_type
elif content_type:
raise ValueError(
"'headers' must not contain 'Content-Type' when the "
"'content_type' parameter is provided."
)
self._resource_closers = []
# This parameter is set by the handler. It's necessary to preserve the
# historical behavior of request_finished.
self._handler_class = None
self.cookies = SimpleCookie()
self.closed = False
if status is not None:
try:
self.status_code = int(status)
except (ValueError, TypeError):
raise TypeError("HTTP status code must be an integer.")
if not 100 <= self.status_code <= 599:
raise ValueError("HTTP status code must be an integer from 100 to 599.")
self._reason_phrase = reason
@property
def reason_phrase(self):
if self._reason_phrase is not None:
return self._reason_phrase
# Leave self._reason_phrase unset in order to use the default
# reason phrase for status code.
return responses.get(self.status_code, "Unknown Status Code")
@reason_phrase.setter
def reason_phrase(self, value):
self._reason_phrase = value
@property
def charset(self):
if self._charset is not None:
return self._charset
# The Content-Type header may not yet be set, because the charset is
# being inserted *into* it.
if content_type := self.headers.get("Content-Type"):
if matched := _charset_from_content_type_re.search(content_type):
# Extract the charset and strip its double quotes.
# Note that having parsed it from the Content-Type, we don't
# store it back into the _charset for later intentionally, to
# allow for the Content-Type to be switched again later.
return matched["charset"].replace('"', "")
return settings.DEFAULT_CHARSET
@charset.setter
def charset(self, value):
self._charset = value
def serialize_headers(self):
"""HTTP headers as a bytestring."""
return b"\r\n".join(
[
key.encode("ascii") + b": " + value.encode("latin-1")
for key, value in self.headers.items()
]
)
__bytes__ = serialize_headers
@property
def _content_type_for_repr(self):
return (
', "%s"' % self.headers["Content-Type"]
if "Content-Type" in self.headers
else ""
)
def __setitem__(self, header, value):
self.headers[header] = value
def __delitem__(self, header):
del self.headers[header]
def __getitem__(self, header):
return self.headers[header]
def has_header(self, header):
"""Case-insensitive check for a header."""
return header in self.headers
__contains__ = has_header
def items(self):
return self.headers.items()
def get(self, header, alternate=None):
return self.headers.get(header, alternate)
def set_cookie(
self,
key,
value="",
max_age=None,
expires=None,
path="/",
domain=None,
secure=False,
httponly=False,
samesite=None,
):
"""
Set a cookie.
``expires`` can be:
- a string in the correct format,
- a naive ``datetime.datetime`` object in UTC,
- an aware ``datetime.datetime`` object in any time zone.
If it is a ``datetime.datetime`` object then calculate ``max_age``.
``max_age`` can be:
- int/float specifying seconds,
- ``datetime.timedelta`` object.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
if timezone.is_naive(expires):
expires = timezone.make_aware(expires, datetime.timezone.utc)
delta = expires - datetime.datetime.now(tz=datetime.timezone.utc)
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta += datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
if max_age is not None:
raise ValueError("'expires' and 'max_age' can't be used together.")
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]["expires"] = expires
else:
self.cookies[key]["expires"] = ""
if max_age is not None:
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
self.cookies[key]["max-age"] = int(max_age)
            # IE requires expires, so set it if it hasn't been already.
if not expires:
self.cookies[key]["expires"] = http_date(time.time() + max_age)
if path is not None:
self.cookies[key]["path"] = path
if domain is not None:
self.cookies[key]["domain"] = domain
if secure:
self.cookies[key]["secure"] = True
if httponly:
self.cookies[key]["httponly"] = True
if samesite:
if samesite.lower() not in ("lax", "none", "strict"):
raise ValueError('samesite must be "lax", "none", or "strict".')
self.cookies[key]["samesite"] = samesite
def setdefault(self, key, value):
"""Set a header unless it has already been set."""
self.headers.setdefault(key, value)
def set_signed_cookie(self, key, value, salt="", **kwargs):
value = signing.get_cookie_signer(salt=key + salt).sign(value)
return self.set_cookie(key, value, **kwargs)
def delete_cookie(self, key, path="/", domain=None, samesite=None):
# Browsers can ignore the Set-Cookie header if the cookie doesn't use
# the secure flag and:
# - the cookie name starts with "__Host-" or "__Secure-", or
# - the samesite is "none".
secure = key.startswith(("__Secure-", "__Host-")) or (
samesite and samesite.lower() == "none"
)
self.set_cookie(
key,
max_age=0,
path=path,
domain=domain,
secure=secure,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
samesite=samesite,
)
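    # Sketch (illustrative cookie name): deleting a "__Host-" cookie sets the
    # secure flag automatically, since browsers ignore an insecure deletion
    # for such prefixed cookies::
    #
    #     >>> response = HttpResponse()
    #     >>> response.delete_cookie("__Host-session")
    #     >>> response.cookies["__Host-session"]["secure"]
    #     True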
# Common methods used by subclasses
def make_bytes(self, value):
"""Turn a value into a bytestring encoded in the output charset."""
# Per PEP 3333, this response body must be bytes. To avoid returning
# an instance of a subclass, this function returns `bytes(value)`.
# This doesn't make a copy when `value` already contains bytes.
# Handle string types -- we can't rely on force_bytes here because:
# - Python attempts str conversion first
# - when self._charset != 'utf-8' it re-encodes the content
if isinstance(value, (bytes, memoryview)):
return bytes(value)
if isinstance(value, str):
return bytes(value.encode(self.charset))
# Handle non-string types.
return str(value).encode(self.charset)
# These methods partially implement the file-like object interface.
# See https://docs.python.org/library/io.html#io.IOBase
# The WSGI server must call this method upon completion of the request.
# See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
def close(self):
for closer in self._resource_closers:
try:
closer()
except Exception:
pass
# Free resources that were still referenced.
self._resource_closers.clear()
self.closed = True
signals.request_finished.send(sender=self._handler_class)
def write(self, content):
raise OSError("This %s instance is not writable" % self.__class__.__name__)
def flush(self):
pass
def tell(self):
raise OSError(
"This %s instance cannot tell its position" % self.__class__.__name__
)
# These methods partially implement a stream-like object interface.
# See https://docs.python.org/library/io.html#io.IOBase
def readable(self):
return False
def seekable(self):
return False
def writable(self):
return False
def writelines(self, lines):
raise OSError("This %s instance is not writable" % self.__class__.__name__)
class HttpResponse(HttpResponseBase):
"""
An HTTP response class with a string as content.
This content can be read, appended to, or replaced.
"""
streaming = False
non_picklable_attrs = frozenset(
[
"resolver_match",
# Non-picklable attributes added by test clients.
"client",
"context",
"json",
"templates",
]
)
def __init__(self, content=b"", *args, **kwargs):
super().__init__(*args, **kwargs)
# Content is a bytestring. See the `content` property methods.
self.content = content
def __getstate__(self):
obj_dict = self.__dict__.copy()
for attr in self.non_picklable_attrs:
if attr in obj_dict:
del obj_dict[attr]
return obj_dict
def __repr__(self):
return "<%(cls)s status_code=%(status_code)d%(content_type)s>" % {
"cls": self.__class__.__name__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
}
def serialize(self):
"""Full HTTP message, including headers, as a bytestring."""
return self.serialize_headers() + b"\r\n\r\n" + self.content
__bytes__ = serialize
@property
def content(self):
return b"".join(self._container)
@content.setter
def content(self, value):
# Consume iterators upon assignment to allow repeated iteration.
if hasattr(value, "__iter__") and not isinstance(
value, (bytes, memoryview, str)
):
content = b"".join(self.make_bytes(chunk) for chunk in value)
if hasattr(value, "close"):
try:
value.close()
except Exception:
pass
else:
content = self.make_bytes(value)
# Create a list of properly encoded bytestrings to support write().
self._container = [content]
def __iter__(self):
return iter(self._container)
def write(self, content):
self._container.append(self.make_bytes(content))
def tell(self):
return len(self.content)
def getvalue(self):
return self.content
def writable(self):
return True
def writelines(self, lines):
for line in lines:
self.write(line)
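# A hedged sketch of the file-like interface above (assumes Django settings
# are configured; content values are illustrative)::
#
#     >>> response = HttpResponse(b"Hello")
#     >>> response.write(b" World")
#     >>> response.content
#     b'Hello World'
#     >>> response.tell()
#     11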
class StreamingHttpResponse(HttpResponseBase):
"""
A streaming HTTP response class with an iterator as content.
This should only be iterated once, when the response is streamed to the
client. However, it can be appended to or replaced with a new iterator
that wraps the original content (or yields entirely new content).
"""
streaming = True
def __init__(self, streaming_content=(), *args, **kwargs):
super().__init__(*args, **kwargs)
# `streaming_content` should be an iterable of bytestrings.
# See the `streaming_content` property methods.
self.streaming_content = streaming_content
def __repr__(self):
return "<%(cls)s status_code=%(status_code)d%(content_type)s>" % {
"cls": self.__class__.__qualname__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
}
@property
def content(self):
raise AttributeError(
"This %s instance has no `content` attribute. Use "
"`streaming_content` instead." % self.__class__.__name__
)
@property
def streaming_content(self):
if self.is_async:
# pull to lexical scope to capture fixed reference in case
# streaming_content is set again later.
_iterator = self._iterator
async def awrapper():
async for part in _iterator:
yield self.make_bytes(part)
return awrapper()
else:
return map(self.make_bytes, self._iterator)
@streaming_content.setter
def streaming_content(self, value):
self._set_streaming_content(value)
def _set_streaming_content(self, value):
# Ensure we can never iterate on "value" more than once.
try:
self._iterator = iter(value)
self.is_async = False
except TypeError:
self._iterator = aiter(value)
self.is_async = True
if hasattr(value, "close"):
self._resource_closers.append(value.close)
def __iter__(self):
try:
return iter(self.streaming_content)
except TypeError:
warnings.warn(
"StreamingHttpResponse must consume asynchronous iterators in order to "
"serve them synchronously. Use a synchronous iterator instead.",
Warning,
)
# async iterator. Consume in async_to_sync and map back.
async def to_list(_iterator):
as_list = []
async for chunk in _iterator:
as_list.append(chunk)
return as_list
return map(self.make_bytes, iter(async_to_sync(to_list)(self._iterator)))
async def __aiter__(self):
try:
async for part in self.streaming_content:
yield part
except TypeError:
warnings.warn(
"StreamingHttpResponse must consume synchronous iterators in order to "
"serve them asynchronously. Use an asynchronous iterator instead.",
Warning,
)
# sync iterator. Consume via sync_to_async and yield via async
# generator.
for part in await sync_to_async(list)(self.streaming_content):
yield part
def getvalue(self):
return b"".join(self.streaming_content)
class FileResponse(StreamingHttpResponse):
"""
A streaming HTTP response class optimized for files.
"""
block_size = 4096
def __init__(self, *args, as_attachment=False, filename="", **kwargs):
self.as_attachment = as_attachment
self.filename = filename
self._no_explicit_content_type = (
"content_type" not in kwargs or kwargs["content_type"] is None
)
super().__init__(*args, **kwargs)
def _set_streaming_content(self, value):
if not hasattr(value, "read"):
self.file_to_stream = None
return super()._set_streaming_content(value)
self.file_to_stream = filelike = value
if hasattr(filelike, "close"):
self._resource_closers.append(filelike.close)
value = iter(lambda: filelike.read(self.block_size), b"")
self.set_headers(filelike)
super()._set_streaming_content(value)
def set_headers(self, filelike):
"""
Set some common response headers (Content-Length, Content-Type, and
Content-Disposition) based on the `filelike` response content.
"""
filename = getattr(filelike, "name", "")
filename = filename if isinstance(filename, str) else ""
seekable = hasattr(filelike, "seek") and (
not hasattr(filelike, "seekable") or filelike.seekable()
)
if hasattr(filelike, "tell"):
if seekable:
initial_position = filelike.tell()
filelike.seek(0, io.SEEK_END)
self.headers["Content-Length"] = filelike.tell() - initial_position
filelike.seek(initial_position)
elif hasattr(filelike, "getbuffer"):
self.headers["Content-Length"] = (
filelike.getbuffer().nbytes - filelike.tell()
)
elif os.path.exists(filename):
self.headers["Content-Length"] = (
os.path.getsize(filename) - filelike.tell()
)
elif seekable:
self.headers["Content-Length"] = sum(
iter(lambda: len(filelike.read(self.block_size)), 0)
)
filelike.seek(-int(self.headers["Content-Length"]), io.SEEK_END)
filename = os.path.basename(self.filename or filename)
if self._no_explicit_content_type:
if filename:
content_type, encoding = mimetypes.guess_type(filename)
# Encoding isn't set to prevent browsers from automatically
# uncompressing files.
content_type = {
"bzip2": "application/x-bzip",
"gzip": "application/gzip",
"xz": "application/x-xz",
}.get(encoding, content_type)
self.headers["Content-Type"] = (
content_type or "application/octet-stream"
)
else:
self.headers["Content-Type"] = "application/octet-stream"
if content_disposition := content_disposition_header(
self.as_attachment, filename
):
self.headers["Content-Disposition"] = content_disposition
class HttpResponseRedirectBase(HttpResponse):
allowed_schemes = ["http", "https", "ftp"]
def __init__(self, redirect_to, *args, **kwargs):
super().__init__(*args, **kwargs)
self["Location"] = iri_to_uri(redirect_to)
parsed = urlparse(str(redirect_to))
if parsed.scheme and parsed.scheme not in self.allowed_schemes:
raise DisallowedRedirect(
"Unsafe redirect to URL with protocol '%s'" % parsed.scheme
)
url = property(lambda self: self["Location"])
def __repr__(self):
return (
'<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">'
% {
"cls": self.__class__.__name__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
"url": self.url,
}
)
class HttpResponseRedirect(HttpResponseRedirectBase):
status_code = 302
class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
status_code = 301
class HttpResponseNotModified(HttpResponse):
status_code = 304
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
del self["content-type"]
@HttpResponse.content.setter
def content(self, value):
if value:
raise AttributeError(
"You cannot set content to a 304 (Not Modified) response"
)
self._container = []
class HttpResponseBadRequest(HttpResponse):
status_code = 400
class HttpResponseNotFound(HttpResponse):
status_code = 404
class HttpResponseForbidden(HttpResponse):
status_code = 403
class HttpResponseNotAllowed(HttpResponse):
status_code = 405
def __init__(self, permitted_methods, *args, **kwargs):
super().__init__(*args, **kwargs)
self["Allow"] = ", ".join(permitted_methods)
def __repr__(self):
return "<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>" % {
"cls": self.__class__.__name__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
"methods": self["Allow"],
}
class HttpResponseGone(HttpResponse):
status_code = 410
class HttpResponseServerError(HttpResponse):
status_code = 500
class Http404(Exception):
pass
class JsonResponse(HttpResponse):
"""
An HTTP response class that consumes data to be serialized to JSON.
:param data: Data to be dumped into json. By default only ``dict`` objects
are allowed to be passed due to a security flaw before ECMAScript 5. See
the ``safe`` parameter for more information.
:param encoder: Should be a json encoder class. Defaults to
``django.core.serializers.json.DjangoJSONEncoder``.
:param safe: Controls if only ``dict`` objects may be serialized. Defaults
to ``True``.
:param json_dumps_params: A dictionary of kwargs passed to json.dumps().
"""
def __init__(
self,
data,
encoder=DjangoJSONEncoder,
safe=True,
json_dumps_params=None,
**kwargs,
):
if safe and not isinstance(data, dict):
raise TypeError(
"In order to allow non-dict objects to be serialized set the "
"safe parameter to False."
)
if json_dumps_params is None:
json_dumps_params = {}
kwargs.setdefault("content_type", "application/json")
data = json.dumps(data, cls=encoder, **json_dumps_params)
super().__init__(content=data, **kwargs)
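# Sketch (assumes Django settings are configured): dicts serialize directly;
# other top-level types require safe=False, as explained in the docstring::
#
#     >>> JsonResponse({"status": "ok"}).content
#     b'{"status": "ok"}'
#     >>> JsonResponse([1, 2], safe=False).content
#     b'[1, 2]'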
|
b69a4ce6fa1e71ab1541e66203c61459a00554c159e0c3e64bf2248ae66df0dc | import codecs
import copy
from io import BytesIO
from itertools import chain
from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
from django.conf import settings
from django.core import signing
from django.core.exceptions import (
DisallowedHost,
ImproperlyConfigured,
RequestDataTooBig,
TooManyFieldsSent,
)
from django.core.files import uploadhandler
from django.http.multipartparser import MultiPartParser, MultiPartParserError
from django.utils.datastructures import (
CaseInsensitiveMapping,
ImmutableList,
MultiValueDict,
)
from django.utils.encoding import escape_uri_path, iri_to_uri
from django.utils.functional import cached_property
from django.utils.http import is_same_domain, parse_header_parameters
from django.utils.regex_helper import _lazy_re_compile
RAISE_ERROR = object()
host_validation_re = _lazy_re_compile(
r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9\.:]+\])(:[0-9]+)?$"
)
class UnreadablePostError(OSError):
pass
class RawPostDataException(Exception):
"""
You cannot access raw_post_data from a request that has
multipart/* POST data if it has been accessed via POST,
FILES, etc..
"""
pass
class HttpRequest:
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
non_picklable_attrs = frozenset(["resolver_match", "_stream"])
def __init__(self):
# WARNING: The `WSGIRequest` subclass doesn't call `super`.
# Any variable assignment made here should also happen in
# `WSGIRequest.__init__()`.
self.GET = QueryDict(mutable=True)
self.POST = QueryDict(mutable=True)
self.COOKIES = {}
self.META = {}
self.FILES = MultiValueDict()
self.path = ""
self.path_info = ""
self.method = None
self.resolver_match = None
self.content_type = None
self.content_params = None
def __repr__(self):
if self.method is None or not self.get_full_path():
return "<%s>" % self.__class__.__name__
return "<%s: %s %r>" % (
self.__class__.__name__,
self.method,
self.get_full_path(),
)
def __getstate__(self):
obj_dict = self.__dict__.copy()
for attr in self.non_picklable_attrs:
if attr in obj_dict:
del obj_dict[attr]
return obj_dict
def __deepcopy__(self, memo):
obj = copy.copy(self)
for attr in self.non_picklable_attrs:
if hasattr(self, attr):
setattr(obj, attr, copy.deepcopy(getattr(self, attr), memo))
memo[id(self)] = obj
return obj
@cached_property
def headers(self):
return HttpHeaders(self.META)
@cached_property
def accepted_types(self):
"""Return a list of MediaType instances."""
return parse_accept_header(self.headers.get("Accept", "*/*"))
def accepts(self, media_type):
return any(
accepted_type.match(media_type) for accepted_type in self.accepted_types
)
def _set_content_type_params(self, meta):
"""Set content_type, content_params, and encoding."""
self.content_type, self.content_params = parse_header_parameters(
meta.get("CONTENT_TYPE", "")
)
if "charset" in self.content_params:
try:
codecs.lookup(self.content_params["charset"])
except LookupError:
pass
else:
self.encoding = self.content_params["charset"]
def _get_raw_host(self):
"""
Return the HTTP host using the environment or request headers. Skip
allowed hosts protection, so may return an insecure host.
"""
# We try three options, in order of decreasing preference.
if settings.USE_X_FORWARDED_HOST and ("HTTP_X_FORWARDED_HOST" in self.META):
host = self.META["HTTP_X_FORWARDED_HOST"]
elif "HTTP_HOST" in self.META:
host = self.META["HTTP_HOST"]
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META["SERVER_NAME"]
server_port = self.get_port()
if server_port != ("443" if self.is_secure() else "80"):
host = "%s:%s" % (host, server_port)
return host
def get_host(self):
"""Return the HTTP host using the environment or request headers."""
host = self._get_raw_host()
# Allow variants of localhost if ALLOWED_HOSTS is empty and DEBUG=True.
allowed_hosts = settings.ALLOWED_HOSTS
if settings.DEBUG and not allowed_hosts:
allowed_hosts = [".localhost", "127.0.0.1", "[::1]"]
domain, port = split_domain_port(host)
if domain and validate_host(domain, allowed_hosts):
return host
else:
msg = "Invalid HTTP_HOST header: %r." % host
if domain:
msg += " You may need to add %r to ALLOWED_HOSTS." % domain
else:
msg += (
" The domain name provided is not valid according to RFC 1034/1035."
)
raise DisallowedHost(msg)
def get_port(self):
"""Return the port number for the request as a string."""
if settings.USE_X_FORWARDED_PORT and "HTTP_X_FORWARDED_PORT" in self.META:
port = self.META["HTTP_X_FORWARDED_PORT"]
else:
port = self.META["SERVER_PORT"]
return str(port)
def get_full_path(self, force_append_slash=False):
return self._get_full_path(self.path, force_append_slash)
def get_full_path_info(self, force_append_slash=False):
return self._get_full_path(self.path_info, force_append_slash)
def _get_full_path(self, path, force_append_slash):
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return "%s%s%s" % (
escape_uri_path(path),
"/" if force_append_slash and not path.endswith("/") else "",
("?" + iri_to_uri(self.META.get("QUERY_STRING", "")))
if self.META.get("QUERY_STRING", "")
else "",
)
def get_signed_cookie(self, key, default=RAISE_ERROR, salt="", max_age=None):
"""
Attempt to return a signed cookie. If the signature fails or the
cookie has expired, raise an exception, unless the `default` argument
is provided, in which case return that value.
"""
try:
cookie_value = self.COOKIES[key]
except KeyError:
if default is not RAISE_ERROR:
return default
else:
raise
try:
value = signing.get_cookie_signer(salt=key + salt).unsign(
cookie_value, max_age=max_age
)
except signing.BadSignature:
if default is not RAISE_ERROR:
return default
else:
raise
return value
def build_absolute_uri(self, location=None):
"""
Build an absolute URI from the location and the variables available in
this request. If no ``location`` is specified, build the absolute URI
using request.get_full_path(). If the location is absolute, convert it
to an RFC 3987 compliant URI and return it. If location is relative or
is scheme-relative (i.e., ``//example.com/``), urljoin() it to a base
URL constructed from the request variables.
"""
if location is None:
# Make it an absolute url (but schemeless and domainless) for the
# edge case that the path starts with '//'.
location = "//%s" % self.get_full_path()
else:
# Coerce lazy locations.
location = str(location)
bits = urlsplit(location)
if not (bits.scheme and bits.netloc):
# Handle the simple, most common case. If the location is absolute
# and a scheme or host (netloc) isn't provided, skip an expensive
# urljoin() as long as no path segments are '.' or '..'.
if (
bits.path.startswith("/")
and not bits.scheme
and not bits.netloc
and "/./" not in bits.path
and "/../" not in bits.path
):
            # If location starts with '//' but has no netloc, reuse the
            # scheme and netloc from the current request. Strip the double
            # slashes and continue as if it wasn't specified.
location = self._current_scheme_host + location.removeprefix("//")
else:
# Join the constructed URL with the provided location, which
# allows the provided location to apply query strings to the
# base path.
location = urljoin(self._current_scheme_host + self.path, location)
return iri_to_uri(location)
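    # Sketch (the host and path values are illustrative, hence the skips)::
    #
    #     >>> request.build_absolute_uri()            # doctest: +SKIP
    #     'https://example.com/music/bands/?print=true'
    #     >>> request.build_absolute_uri("/bands/")   # doctest: +SKIP
    #     'https://example.com/bands/'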
@cached_property
def _current_scheme_host(self):
return "{}://{}".format(self.scheme, self.get_host())
def _get_scheme(self):
"""
Hook for subclasses like WSGIRequest to implement. Return 'http' by
default.
"""
return "http"
@property
def scheme(self):
if settings.SECURE_PROXY_SSL_HEADER:
try:
header, secure_value = settings.SECURE_PROXY_SSL_HEADER
except ValueError:
raise ImproperlyConfigured(
"The SECURE_PROXY_SSL_HEADER setting must be a tuple containing "
"two values."
)
header_value = self.META.get(header)
if header_value is not None:
header_value, *_ = header_value.split(",", 1)
return "https" if header_value.strip() == secure_value else "http"
return self._get_scheme()
def is_secure(self):
return self.scheme == "https"
@property
def encoding(self):
return self._encoding
@encoding.setter
def encoding(self, val):
"""
Set the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, remove and recreate it on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, "GET"):
del self.GET
if hasattr(self, "_post"):
del self._post
def _initialize_handlers(self):
self._upload_handlers = [
uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS
]
@property
def upload_handlers(self):
if not self._upload_handlers:
# If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
@upload_handlers.setter
def upload_handlers(self, upload_handlers):
if hasattr(self, "_files"):
raise AttributeError(
"You cannot set the upload handlers after the upload has been "
"processed."
)
self._upload_handlers = upload_handlers
def parse_file_upload(self, META, post_data):
"""Return a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning=(
"You cannot alter upload handlers after the upload has been "
"processed."
),
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
@property
def body(self):
if not hasattr(self, "_body"):
if self._read_started:
raise RawPostDataException(
"You cannot access body after reading from request's data stream"
)
# Limit the maximum request data size that will be handled in-memory.
if (
settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
and int(self.META.get("CONTENT_LENGTH") or 0)
> settings.DATA_UPLOAD_MAX_MEMORY_SIZE
):
raise RequestDataTooBig(
"Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
)
try:
self._body = self.read()
except OSError as e:
raise UnreadablePostError(*e.args) from e
finally:
self._stream.close()
self._stream = BytesIO(self._body)
return self._body
def _mark_post_parse_error(self):
self._post = QueryDict()
self._files = MultiValueDict()
def _load_post_and_files(self):
"""Populate self._post and self._files if the content-type is a form type"""
if self.method != "POST":
self._post, self._files = (
QueryDict(encoding=self._encoding),
MultiValueDict(),
)
return
if self._read_started and not hasattr(self, "_body"):
self._mark_post_parse_error()
return
if self.content_type == "multipart/form-data":
if hasattr(self, "_body"):
# Use already read data
data = BytesIO(self._body)
else:
data = self
try:
self._post, self._files = self.parse_file_upload(self.META, data)
except MultiPartParserError:
# An error occurred while parsing POST data. Since when
# formatting the error the request handler might access
# self.POST, set self._post and self._file to prevent
# attempts to parse POST data again.
self._mark_post_parse_error()
raise
elif self.content_type == "application/x-www-form-urlencoded":
self._post, self._files = (
QueryDict(self.body, encoding=self._encoding),
MultiValueDict(),
)
else:
self._post, self._files = (
QueryDict(encoding=self._encoding),
MultiValueDict(),
)
def close(self):
if hasattr(self, "_files"):
for f in chain.from_iterable(list_[1] for list_ in self._files.lists()):
f.close()
# File-like and iterator interface.
#
# Expects self._stream to be set to an appropriate source of bytes by
# a corresponding request subclass (e.g. WSGIRequest).
# Also when request data has already been read by request.POST or
# request.body, self._stream points to a BytesIO instance
# containing that data.
def read(self, *args, **kwargs):
self._read_started = True
try:
return self._stream.read(*args, **kwargs)
except OSError as e:
raise UnreadablePostError(*e.args) from e
def readline(self, *args, **kwargs):
self._read_started = True
try:
return self._stream.readline(*args, **kwargs)
except OSError as e:
raise UnreadablePostError(*e.args) from e
def __iter__(self):
return iter(self.readline, b"")
def readlines(self):
return list(self)
class HttpHeaders(CaseInsensitiveMapping):
HTTP_PREFIX = "HTTP_"
# PEP 333 gives two headers which aren't prepended with HTTP_.
UNPREFIXED_HEADERS = {"CONTENT_TYPE", "CONTENT_LENGTH"}
def __init__(self, environ):
headers = {}
for header, value in environ.items():
name = self.parse_header_name(header)
if name:
headers[name] = value
super().__init__(headers)
def __getitem__(self, key):
"""Allow header lookup using underscores in place of hyphens."""
return super().__getitem__(key.replace("_", "-"))
@classmethod
def parse_header_name(cls, header):
if header.startswith(cls.HTTP_PREFIX):
header = header.removeprefix(cls.HTTP_PREFIX)
elif header not in cls.UNPREFIXED_HEADERS:
return None
return header.replace("_", "-").title()
@classmethod
def to_wsgi_name(cls, header):
header = header.replace("-", "_").upper()
if header in cls.UNPREFIXED_HEADERS:
return header
return f"{cls.HTTP_PREFIX}{header}"
@classmethod
def to_asgi_name(cls, header):
return header.replace("-", "_").upper()
@classmethod
def to_wsgi_names(cls, headers):
return {
cls.to_wsgi_name(header_name): value
for header_name, value in headers.items()
}
@classmethod
def to_asgi_names(cls, headers):
return {
cls.to_asgi_name(header_name): value
for header_name, value in headers.items()
}
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict which represents a query string.
A QueryDict can be used to represent GET or POST data. It subclasses
MultiValueDict since keys in such data can be repeated, for instance
in the data from a form with a <select multiple> field.
By default QueryDicts are immutable, though the copy() method
will always return a mutable copy.
Both keys and values set on this class are converted from the given encoding
(DEFAULT_CHARSET by default) to str.
"""
    # These are both reset in __init__, but are specified here at the class
    # level so that unpickling will have valid values.
_mutable = True
_encoding = None
def __init__(self, query_string=None, mutable=False, encoding=None):
super().__init__()
self.encoding = encoding or settings.DEFAULT_CHARSET
query_string = query_string or ""
parse_qsl_kwargs = {
"keep_blank_values": True,
"encoding": self.encoding,
"max_num_fields": settings.DATA_UPLOAD_MAX_NUMBER_FIELDS,
}
if isinstance(query_string, bytes):
# query_string normally contains URL-encoded data, a subset of ASCII.
try:
query_string = query_string.decode(self.encoding)
except UnicodeDecodeError:
# ... but some user agents are misbehaving :-(
query_string = query_string.decode("iso-8859-1")
try:
for key, value in parse_qsl(query_string, **parse_qsl_kwargs):
self.appendlist(key, value)
except ValueError as e:
# ValueError can also be raised if the strict_parsing argument to
# parse_qsl() is True. As that is not used by Django, assume that
# the exception was raised by exceeding the value of max_num_fields
# instead of fragile checks of exception message strings.
raise TooManyFieldsSent(
"The number of GET/POST parameters exceeded "
"settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
) from e
self._mutable = mutable
@classmethod
def fromkeys(cls, iterable, value="", mutable=False, encoding=None):
"""
Return a new QueryDict with keys (may be repeated) from an iterable and
values from value.
"""
q = cls("", mutable=True, encoding=encoding)
for key in iterable:
q.appendlist(key, value)
if not mutable:
q._mutable = False
return q
@property
def encoding(self):
if self._encoding is None:
self._encoding = settings.DEFAULT_CHARSET
return self._encoding
@encoding.setter
def encoding(self, value):
self._encoding = value
def _assert_mutable(self):
if not self._mutable:
raise AttributeError("This QueryDict instance is immutable")
def __setitem__(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super().__setitem__(key, value)
def __delitem__(self, key):
self._assert_mutable()
super().__delitem__(key)
def __copy__(self):
result = self.__class__("", mutable=True, encoding=self.encoding)
for key, value in self.lists():
result.setlist(key, value)
return result
def __deepcopy__(self, memo):
result = self.__class__("", mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in self.lists():
result.setlist(copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
list_ = [bytes_to_text(elt, self.encoding) for elt in list_]
super().setlist(key, list_)
def setlistdefault(self, key, default_list=None):
self._assert_mutable()
return super().setlistdefault(key, default_list)
def appendlist(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super().appendlist(key, value)
def pop(self, key, *args):
self._assert_mutable()
return super().pop(key, *args)
def popitem(self):
self._assert_mutable()
return super().popitem()
def clear(self):
self._assert_mutable()
super().clear()
def setdefault(self, key, default=None):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
default = bytes_to_text(default, self.encoding)
return super().setdefault(key, default)
def copy(self):
"""Return a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self, safe=None):
"""
Return an encoded string of all query string arguments.
`safe` specifies characters which don't require quoting, for example::
>>> q = QueryDict(mutable=True)
>>> q['next'] = '/a&b/'
>>> q.urlencode()
'next=%2Fa%26b%2F'
>>> q.urlencode(safe='/')
'next=/a%26b/'
"""
output = []
if safe:
safe = safe.encode(self.encoding)
def encode(k, v):
return "%s=%s" % ((quote(k, safe), quote(v, safe)))
else:
def encode(k, v):
return urlencode({k: v})
for k, list_ in self.lists():
output.extend(
encode(k.encode(self.encoding), str(v).encode(self.encoding))
for v in list_
)
return "&".join(output)
class MediaType:
def __init__(self, media_type_raw_line):
full_type, self.params = parse_header_parameters(
media_type_raw_line if media_type_raw_line else ""
)
self.main_type, _, self.sub_type = full_type.partition("/")
def __str__(self):
params_str = "".join("; %s=%s" % (k, v) for k, v in self.params.items())
return "%s%s%s" % (
self.main_type,
("/%s" % self.sub_type) if self.sub_type else "",
params_str,
)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__qualname__, self)
@property
def is_all_types(self):
return self.main_type == "*" and self.sub_type == "*"
def match(self, other):
if self.is_all_types:
return True
other = MediaType(other)
if self.main_type == other.main_type and self.sub_type in {"*", other.sub_type}:
return True
return False
# It's neither necessary nor appropriate to use
# django.utils.encoding.force_str() for parsing URLs and form inputs. Thus,
# this slightly more restricted function, used by QueryDict.
def bytes_to_text(s, encoding):
"""
Convert bytes objects to strings, using the given encoding. Illegally
encoded input characters are replaced with Unicode "unknown" codepoint
(\ufffd).
Return any non-bytes objects without change.
"""
if isinstance(s, bytes):
return str(s, encoding, "replace")
else:
return s
def split_domain_port(host):
"""
Return a (domain, port) tuple from a given host.
Returned domain is lowercased. If the host is invalid, the domain will be
empty.
"""
host = host.lower()
if not host_validation_re.match(host):
return "", ""
if host[-1] == "]":
# It's an IPv6 address without a port.
return host, ""
bits = host.rsplit(":", 1)
domain, port = bits if len(bits) == 2 else (bits[0], "")
# Remove a trailing dot (if present) from the domain.
domain = domain.removesuffix(".")
return domain, port
def validate_host(host, allowed_hosts):
"""
Validate the given host for this site.
Check that the host looks valid and matches a host or host pattern in the
given list of ``allowed_hosts``. Any pattern beginning with a period
matches a domain and all its subdomains (e.g. ``.example.com`` matches
``example.com`` and any subdomain), ``*`` matches anything, and anything
else must match exactly.
Note: This function assumes that the given host is lowercased and has
already had the port, if any, stripped off.
Return ``True`` for a valid host, ``False`` otherwise.
"""
return any(
pattern == "*" or is_same_domain(host, pattern) for pattern in allowed_hosts
)
def parse_accept_header(header):
return [MediaType(token) for token in header.split(",") if token.strip()]
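# Sketch: MediaType matching as used by HttpRequest.accepts()::
#
#     >>> [str(mt) for mt in parse_accept_header("text/html,*/*;q=0.8")]
#     ['text/html', '*/*; q=0.8']
#     >>> MediaType("text/*").match("text/html")
#     True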
|
d2e4b011cab2c2c9f516b74eb722234ac482084ab35b408355e6cffadcc80626 | import zoneinfo
from datetime import datetime
from datetime import timezone as datetime_timezone
from datetime import tzinfo
from django.template import Library, Node, TemplateSyntaxError
from django.utils import timezone
register = Library()
# HACK: datetime instances cannot be assigned new attributes. Define a subclass
# in order to define new attributes in do_timezone().
class datetimeobject(datetime):
pass
# Template filters
@register.filter
def localtime(value):
"""
Convert a datetime to local time in the active time zone.
This only makes sense within a {% localtime off %} block.
"""
return do_timezone(value, timezone.get_current_timezone())
@register.filter
def utc(value):
"""
Convert a datetime to UTC.
"""
return do_timezone(value, datetime_timezone.utc)
@register.filter("timezone")
def do_timezone(value, arg):
"""
Convert a datetime to local time in a given time zone.
The argument must be an instance of a tzinfo subclass or a time zone name.
Naive datetimes are assumed to be in local time in the default time zone.
"""
if not isinstance(value, datetime):
return ""
# Obtain a timezone-aware datetime
try:
if timezone.is_naive(value):
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
    # Filters must never raise exceptions, so catch everything.
except Exception:
return ""
# Obtain a tzinfo instance
if isinstance(arg, tzinfo):
tz = arg
elif isinstance(arg, str):
try:
tz = zoneinfo.ZoneInfo(arg)
except zoneinfo.ZoneInfoNotFoundError:
return ""
else:
return ""
result = timezone.localtime(value, tz)
# HACK: the convert_to_local_time flag will prevent
# automatic conversion of the value to local time.
result = datetimeobject(
result.year,
result.month,
result.day,
result.hour,
result.minute,
result.second,
result.microsecond,
result.tzinfo,
)
result.convert_to_local_time = False
return result
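# Sketch: template usage of the filters above (the variable and zone name are
# illustrative)::
#
#     {% load tz %}
#     {{ value|timezone:"Europe/Paris" }}
#     {{ value|utc }}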
# Template tags
class LocalTimeNode(Node):
"""
Template node class used by ``localtime_tag``.
"""
def __init__(self, nodelist, use_tz):
self.nodelist = nodelist
self.use_tz = use_tz
def render(self, context):
old_setting = context.use_tz
context.use_tz = self.use_tz
output = self.nodelist.render(context)
context.use_tz = old_setting
return output
class TimezoneNode(Node):
"""
Template node class used by ``timezone_tag``.
"""
def __init__(self, nodelist, tz):
self.nodelist = nodelist
self.tz = tz
def render(self, context):
with timezone.override(self.tz.resolve(context)):
output = self.nodelist.render(context)
return output
class GetCurrentTimezoneNode(Node):
"""
Template node class used by ``get_current_timezone_tag``.
"""
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = timezone.get_current_timezone_name()
return ""
@register.tag("localtime")
def localtime_tag(parser, token):
"""
Force or prevent conversion of datetime objects to local time,
regardless of the value of ``settings.USE_TZ``.
Sample usage::
{% localtime off %}{{ value_in_utc }}{% endlocaltime %}
"""
bits = token.split_contents()
if len(bits) == 1:
use_tz = True
elif len(bits) > 2 or bits[1] not in ("on", "off"):
raise TemplateSyntaxError("%r argument should be 'on' or 'off'" % bits[0])
else:
use_tz = bits[1] == "on"
nodelist = parser.parse(("endlocaltime",))
parser.delete_first_token()
return LocalTimeNode(nodelist, use_tz)
@register.tag("timezone")
def timezone_tag(parser, token):
"""
Enable a given time zone just for this block.
The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
time zone name, or ``None``. If it is ``None``, the default time zone is
used within the block.
Sample usage::
{% timezone "Europe/Paris" %}
It is {{ now }} in Paris.
{% endtimezone %}
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'%s' takes one argument (timezone)" % bits[0])
tz = parser.compile_filter(bits[1])
nodelist = parser.parse(("endtimezone",))
parser.delete_first_token()
return TimezoneNode(nodelist, tz)
@register.tag("get_current_timezone")
def get_current_timezone_tag(parser, token):
"""
Store the name of the current time zone in the context.
Usage::
{% get_current_timezone as TIME_ZONE %}
This will fetch the currently active time zone and put its name
into the ``TIME_ZONE`` context variable.
"""
    # token.split_contents() isn't useful here because this tag doesn't
    # accept variables as arguments.
args = token.contents.split()
if len(args) != 3 or args[1] != "as":
raise TemplateSyntaxError(
"'get_current_timezone' requires 'as variable' (got %r)" % args
)
return GetCurrentTimezoneNode(args[2])
|
181e7fbb7ca602dcf152d018f102dbfe9719af389ef59c4471afb536e2885c16 | from django.template import Library, Node, TemplateSyntaxError
from django.utils import formats
register = Library()
@register.filter(is_safe=False)
def localize(value):
"""
Force a value to be rendered as a localized value.
"""
return str(formats.localize(value, use_l10n=True))
@register.filter(is_safe=False)
def unlocalize(value):
"""
Force a value to be rendered as a non-localized value.
"""
return str(formats.localize(value, use_l10n=False))
class LocalizeNode(Node):
def __init__(self, nodelist, use_l10n):
self.nodelist = nodelist
self.use_l10n = use_l10n
def __repr__(self):
return "<%s>" % self.__class__.__name__
def render(self, context):
old_setting = context.use_l10n
context.use_l10n = self.use_l10n
output = self.nodelist.render(context)
context.use_l10n = old_setting
return output
@register.tag("localize")
def localize_tag(parser, token):
"""
    Force or prevent localization of values.
Sample usage::
{% localize off %}
var pi = {{ 3.1415 }};
{% endlocalize %}
"""
use_l10n = None
bits = list(token.split_contents())
if len(bits) == 1:
use_l10n = True
elif len(bits) > 2 or bits[1] not in ("on", "off"):
raise TemplateSyntaxError("%r argument should be 'on' or 'off'" % bits[0])
else:
use_l10n = bits[1] == "on"
nodelist = parser.parse(("endlocalize",))
parser.delete_first_token()
return LocalizeNode(nodelist, use_l10n)
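# Sketch: the filters above can be applied directly in a template::
#
#     {% load l10n %}
#     {{ value|localize }}
#     {{ value|unlocalize }}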
|
59a62f5949f9653111c23a2ebe49be7392f92d12dcf86a525a5d7dc05a1dfdb9 | from django.core.cache import InvalidCacheBackendError, caches
from django.core.cache.utils import make_template_fragment_key
from django.template import Library, Node, TemplateSyntaxError, VariableDoesNotExist
register = Library()
class CacheNode(Node):
def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name):
self.nodelist = nodelist
self.expire_time_var = expire_time_var
self.fragment_name = fragment_name
self.vary_on = vary_on
self.cache_name = cache_name
def render(self, context):
try:
expire_time = self.expire_time_var.resolve(context)
except VariableDoesNotExist:
raise TemplateSyntaxError(
'"cache" tag got an unknown variable: %r' % self.expire_time_var.var
)
if expire_time is not None:
try:
expire_time = int(expire_time)
except (ValueError, TypeError):
raise TemplateSyntaxError(
'"cache" tag got a non-integer timeout value: %r' % expire_time
)
if self.cache_name:
try:
cache_name = self.cache_name.resolve(context)
except VariableDoesNotExist:
raise TemplateSyntaxError(
'"cache" tag got an unknown variable: %r' % self.cache_name.var
)
try:
fragment_cache = caches[cache_name]
except InvalidCacheBackendError:
raise TemplateSyntaxError(
"Invalid cache name specified for cache tag: %r" % cache_name
)
else:
try:
fragment_cache = caches["template_fragments"]
except InvalidCacheBackendError:
fragment_cache = caches["default"]
vary_on = [var.resolve(context) for var in self.vary_on]
cache_key = make_template_fragment_key(self.fragment_name, vary_on)
value = fragment_cache.get(cache_key)
if value is None:
value = self.nodelist.render(context)
fragment_cache.set(cache_key, value, expire_time)
return value
@register.tag("cache")
def do_cache(parser, token):
"""
This will cache the contents of a template fragment for a given amount
of time.
Usage::
{% load cache %}
{% cache [expire_time] [fragment_name] %}
.. some expensive processing ..
{% endcache %}
This tag also supports varying by a list of arguments::
{% load cache %}
{% cache [expire_time] [fragment_name] [var1] [var2] .. %}
.. some expensive processing ..
{% endcache %}
Optionally the cache to use may be specified thus::
{% cache .... using="cachename" %}
Each unique set of arguments will result in a unique cache entry.
"""
nodelist = parser.parse(("endcache",))
parser.delete_first_token()
tokens = token.split_contents()
if len(tokens) < 3:
raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % tokens[0])
if len(tokens) > 3 and tokens[-1].startswith("using="):
cache_name = parser.compile_filter(tokens[-1].removeprefix("using="))
tokens = tokens[:-1]
else:
cache_name = None
return CacheNode(
nodelist,
parser.compile_filter(tokens[1]),
tokens[2], # fragment_name can't be a variable.
[parser.compile_filter(t) for t in tokens[3:]],
cache_name,
)
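# Sketch: invalidating a cached fragment from Python. The fragment name and
# vary-on values (``username`` is hypothetical) must match those used in the
# template::
#
#     >>> from django.core.cache import cache
#     >>> key = make_template_fragment_key("sidebar", [username])  # doctest: +SKIP
#     >>> cache.delete(key)  # doctest: +SKIP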
|
0acdb0a2d4335cdd24da502a8930db7d49e5555eb6db8ea311c77500a665149f | from functools import wraps
def no_append_slash(view_func):
"""
Mark a view function as excluded from CommonMiddleware's APPEND_SLASH
redirection.
"""
# view_func.should_append_slash = False would also work, but decorators are
# nicer if they don't have side effects, so return a new function.
@wraps(view_func)
def wrapper_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapper_view.should_append_slash = False
return wrapper_view
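# Sketch: opting a view out of APPEND_SLASH redirection (the view name and
# body are illustrative)::
#
#     @no_append_slash
#     def sensitive_post_endpoint(request):
#         ...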
|
e66586a5f379c53c6a289815616161eab28c1c7d70c8dbe32ec7c42468bace80 | from functools import wraps
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import decorator_from_middleware
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
# Behave like CsrfViewMiddleware but don't reject requests or log warnings.
def _reject(self, request, reason):
return None
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = "requires_csrf_token"
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
def _reject(self, request, reason):
return None
def process_view(self, request, callback, callback_args, callback_kwargs):
retval = super().process_view(request, callback, callback_args, callback_kwargs)
# Force process_response to send the cookie
get_token(request)
return retval
ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = "ensure_csrf_cookie"
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
"""Mark a view function as being exempt from the CSRF view protection."""
# view_func.csrf_exempt = True would also work, but decorators are nicer
# if they don't have side effects, so return a new function.
@wraps(view_func)
def wrapper_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapper_view.csrf_exempt = True
return wrapper_view
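# Sketch: typical decorator usage (view names are illustrative)::
#
#     @csrf_exempt
#     def webhook(request):
#         ...  # called by an external service that can't send a CSRF token
#
#     @ensure_csrf_cookie
#     def spa_shell(request):
#         ...  # guarantees the csrftoken cookie for a JavaScript client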
|
950f66b0b6bb3d5c379a9e08beea79bcf8dbed5a3ff46fe9697df847bc610fef | from django.core.exceptions import ImproperlyConfigured
from django.forms import Form
from django.forms import models as model_forms
from django.http import HttpResponseRedirect
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
from django.views.generic.detail import (
BaseDetailView,
SingleObjectMixin,
SingleObjectTemplateResponseMixin,
)
class FormMixin(ContextMixin):
"""Provide a way to show and handle a form in a request."""
initial = {}
form_class = None
success_url = None
prefix = None
def get_initial(self):
"""Return the initial data to use for forms on this view."""
return self.initial.copy()
def get_prefix(self):
"""Return the prefix to use for forms."""
return self.prefix
def get_form_class(self):
"""Return the form class to use."""
return self.form_class
def get_form(self, form_class=None):
"""Return an instance of the form to be used in this view."""
if form_class is None:
form_class = self.get_form_class()
return form_class(**self.get_form_kwargs())
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
kwargs = {
"initial": self.get_initial(),
"prefix": self.get_prefix(),
}
if self.request.method in ("POST", "PUT"):
kwargs.update(
{
"data": self.request.POST,
"files": self.request.FILES,
}
)
return kwargs
def get_success_url(self):
"""Return the URL to redirect to after processing a valid form."""
if not self.success_url:
raise ImproperlyConfigured("No URL to redirect to. Provide a success_url.")
return str(self.success_url) # success_url may be lazy
def form_valid(self, form):
"""If the form is valid, redirect to the supplied URL."""
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form):
"""If the form is invalid, render the invalid form."""
return self.render_to_response(self.get_context_data(form=form))
def get_context_data(self, **kwargs):
"""Insert the form into the context dict."""
if "form" not in kwargs:
kwargs["form"] = self.get_form()
return super().get_context_data(**kwargs)
class ModelFormMixin(FormMixin, SingleObjectMixin):
"""Provide a way to show and handle a ModelForm in a request."""
fields = None
def get_form_class(self):
"""Return the form class to use in this view."""
if self.fields is not None and self.form_class:
raise ImproperlyConfigured(
"Specifying both 'fields' and 'form_class' is not permitted."
)
if self.form_class:
return self.form_class
else:
if self.model is not None:
# If a model has been explicitly provided, use it
model = self.model
elif getattr(self, "object", None) is not None:
# If this view is operating on a single object, use
# the class of that object
model = self.object.__class__
else:
# Try to get a queryset and extract the model class
# from that
model = self.get_queryset().model
if self.fields is None:
raise ImproperlyConfigured(
"Using ModelFormMixin (base class of %s) without "
"the 'fields' attribute is prohibited." % self.__class__.__name__
)
return model_forms.modelform_factory(model, fields=self.fields)
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
kwargs = super().get_form_kwargs()
if hasattr(self, "object"):
kwargs.update({"instance": self.object})
return kwargs
def get_success_url(self):
"""Return the URL to redirect to after processing a valid form."""
if self.success_url:
url = self.success_url.format(**self.object.__dict__)
else:
try:
url = self.object.get_absolute_url()
except AttributeError:
raise ImproperlyConfigured(
"No URL to redirect to. Either provide a url or define"
" a get_absolute_url method on the Model."
)
return url
def form_valid(self, form):
"""If the form is valid, save the associated model."""
self.object = form.save()
return super().form_valid(form)
class ProcessFormView(View):
"""Render a form on GET and processes it on POST."""
def get(self, request, *args, **kwargs):
"""Handle GET requests: instantiate a blank version of the form."""
return self.render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
"""
Handle POST requests: instantiate a form instance with the passed
POST variables and then check if it's valid.
"""
form = self.get_form()
if form.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
    # PUT is a valid HTTP verb for creating (with a known URL) or editing an
    # object; note that browsers only support POST for now.
def put(self, *args, **kwargs):
return self.post(*args, **kwargs)
class BaseFormView(FormMixin, ProcessFormView):
"""A base view for displaying a form."""
class FormView(TemplateResponseMixin, BaseFormView):
"""A view for displaying a form and rendering a template response."""
class BaseCreateView(ModelFormMixin, ProcessFormView):
"""
Base view for creating a new object instance.
Using this base class requires subclassing to provide a response mixin.
"""
def get(self, request, *args, **kwargs):
self.object = None
return super().get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = None
return super().post(request, *args, **kwargs)
class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView):
"""
View for creating a new object, with a response rendered by a template.
"""
template_name_suffix = "_form"
class BaseUpdateView(ModelFormMixin, ProcessFormView):
"""
Base view for updating an existing object.
Using this base class requires subclassing to provide a response mixin.
"""
def get(self, request, *args, **kwargs):
self.object = self.get_object()
return super().get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
return super().post(request, *args, **kwargs)
class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView):
"""View for updating an object, with a response rendered by a template."""
template_name_suffix = "_form"
class DeletionMixin:
"""Provide the ability to delete objects."""
success_url = None
def delete(self, request, *args, **kwargs):
"""
Call the delete() method on the fetched object and then redirect to the
success URL.
"""
self.object = self.get_object()
success_url = self.get_success_url()
self.object.delete()
return HttpResponseRedirect(success_url)
# Add support for browsers which only accept GET and POST for now.
def post(self, request, *args, **kwargs):
return self.delete(request, *args, **kwargs)
def get_success_url(self):
if self.success_url:
return self.success_url.format(**self.object.__dict__)
else:
raise ImproperlyConfigured("No URL to redirect to. Provide a success_url.")
class BaseDeleteView(DeletionMixin, FormMixin, BaseDetailView):
"""
Base view for deleting an object.
Using this base class requires subclassing to provide a response mixin.
"""
form_class = Form
def post(self, request, *args, **kwargs):
# Set self.object before the usual form processing flow.
# Inlined because having DeletionMixin as the first base, for
# get_success_url(), makes leveraging super() with ProcessFormView
# overly complex.
self.object = self.get_object()
form = self.get_form()
if form.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
def form_valid(self, form):
success_url = self.get_success_url()
self.object.delete()
return HttpResponseRedirect(success_url)
class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView):
"""
View for deleting an object retrieved with self.get_object(), with a
response rendered by a template.
"""
template_name_suffix = "_confirm_delete"
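# Sketch: wiring the generic editing views above (the Author model and URL
# name are hypothetical)::
#
#     from django.urls import reverse_lazy
#
#     class AuthorCreateView(CreateView):
#         model = Author
#         fields = ["name"]
#         success_url = reverse_lazy("author-list")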
|
33f94eeaaffdd90aeb3e84d8f68ba2f790508097cf95af5e75136e37955ce063 | import functools
from django.conf import settings
from django.urls import LocalePrefixPattern, URLResolver, get_resolver, path
from django.views.i18n import set_language
def i18n_patterns(*urls, prefix_default_language=True):
"""
Add the language code prefix to every URL pattern within this function.
This may only be used in the root URLconf, not in an included URLconf.
"""
if not settings.USE_I18N:
return list(urls)
return [
URLResolver(
LocalePrefixPattern(prefix_default_language=prefix_default_language),
list(urls),
)
]
@functools.cache
def is_language_prefix_patterns_used(urlconf):
"""
Return a tuple of two booleans: (
`True` if i18n_patterns() (LocalePrefixPattern) is used in the URLconf,
`True` if the default language should be prefixed
)
"""
for url_pattern in get_resolver(urlconf).url_patterns:
if isinstance(url_pattern.pattern, LocalePrefixPattern):
return True, url_pattern.pattern.prefix_default_language
return False, False
urlpatterns = [
path("setlang/", set_language, name="set_language"),
]
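# Sketch: a root URLconf using i18n_patterns (the included apps are
# hypothetical)::
#
#     from django.urls import include, path
#     from django.conf.urls.i18n import i18n_patterns
#
#     urlpatterns = [
#         path("api/", include("api.urls")),  # never language-prefixed
#     ] + i18n_patterns(
#         path("about/", include("about.urls")),  # /en/about/, /fr/about/, ...
#     )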
|